Mirror of https://codeberg.org/forgejo/forgejo.git (synced 2024-11-17 11:36:22 +00:00)
Same perl replacement as https://github.com/go-gitea/gitea/pull/25686 but for 1.20 to ease future backporting.
Commit 24e64fe372 (parent 4e310133f9)
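The substitution is purely mechanical: since Go 1.18, any is a predeclared alias for interface{}, so every rewritten signature in the diff below keeps exactly the same type and no behaviour changes. A minimal, self-contained sketch (not part of the commit) illustrating the equivalence:

package main

import "fmt"

// printAll accepts any values; a "...any" parameter is identical to "...interface{}".
func printAll(args ...any) {
	for _, v := range args {
		fmt.Println(v)
	}
}

func main() {
	var oldStyle interface{} = "hello" // pre-1.18 spelling
	var newStyle any = 42              // alias introduced in Go 1.18
	printAll(oldStyle, newStyle)       // both satisfy ...any
}

Because the two spellings are interchangeable, the change only standardises on the shorter form.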
@@ -25,7 +25,7 @@ import (
 
 var optionLogVerbose bool
 
-func logVerbose(msg string, args ...interface{}) {
+func logVerbose(msg string, args ...any) {
 	if optionLogVerbose {
 		log.Printf(msg, args...)
 	}

@@ -63,7 +63,7 @@ Outputs to 'cert.pem' and 'key.pem' and will overwrite existing files.`,
 	},
 }
 
-func publicKey(priv interface{}) interface{} {
+func publicKey(priv any) any {
 	switch k := priv.(type) {
 	case *rsa.PrivateKey:
 		return &k.PublicKey

@@ -74,7 +74,7 @@ func publicKey(priv interface{}) interface{} {
 	}
 }
 
-func pemBlockForKey(priv interface{}) *pem.Block {
+func pemBlockForKey(priv any) *pem.Block {
 	switch k := priv.(type) {
 	case *rsa.PrivateKey:
 		return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(k)}

@@ -94,7 +94,7 @@ func runCert(c *cli.Context) error {
 		return err
 	}
 
-	var priv interface{}
+	var priv any
 	var err error
 	switch c.String("ecdsa-curve") {
 	case "":
@@ -161,7 +161,7 @@ It can be used for backup and capture Gitea server image to send to maintainer`,
 	},
 }
 
-func fatal(format string, args ...interface{}) {
+func fatal(format string, args ...any) {
 	fmt.Fprintf(os.Stderr, format+"\n", args...)
 	log.Fatal(format, args...)
 }

@@ -236,7 +236,7 @@ func runDump(ctx *cli.Context) error {
 		return err
 	}
 
-	var iface interface{}
+	var iface any
 	if fileName == "-" {
 		iface, err = archiver.ByExtension(fmt.Sprintf(".%s", outType))
 	} else {

@@ -178,7 +178,7 @@ func runAddConnLogger(c *cli.Context) error {
 	defer cancel()
 
 	setup(ctx, c.Bool("debug"))
-	vals := map[string]interface{}{}
+	vals := map[string]any{}
 	mode := "conn"
 	vals["net"] = "tcp"
 	if c.IsSet("protocol") {

@@ -208,7 +208,7 @@ func runAddFileLogger(c *cli.Context) error {
 	defer cancel()
 
 	setup(ctx, c.Bool("debug"))
-	vals := map[string]interface{}{}
+	vals := map[string]any{}
 	mode := "file"
 	if c.IsSet("filename") {
 		vals["filename"] = c.String("filename")

@@ -236,7 +236,7 @@ func runAddFileLogger(c *cli.Context) error {
 	return commonAddLogger(c, mode, vals)
 }
 
-func commonAddLogger(c *cli.Context, mode string, vals map[string]interface{}) error {
+func commonAddLogger(c *cli.Context, mode string, vals map[string]any) error {
 	if len(c.String("level")) > 0 {
 		vals["level"] = log.LevelFromString(c.String("level")).String()
 	}
@@ -95,7 +95,7 @@ var (
 
 // fail prints message to stdout, it's mainly used for git serv and git hook commands.
 // The output will be passed to git client and shown to user.
-func fail(ctx context.Context, userMessage, logMsgFmt string, args ...interface{}) error {
+func fail(ctx context.Context, userMessage, logMsgFmt string, args ...any) error {
 	if userMessage == "" {
 		userMessage = "Internal Server Error (no specific error)"
 	}

@@ -44,7 +44,7 @@ func init() {
 // TranslatableMessage represents JSON struct that can be translated with a Locale
 type TranslatableMessage struct {
 	Format string
-	Args   []interface{} `json:"omitempty"`
+	Args   []any `json:"omitempty"`
 }
 
 // LoadRepo loads repository of the task
@@ -47,7 +47,7 @@ var sshOpLocker sync.Mutex
 // AuthorizedStringForKey creates the authorized keys string appropriate for the provided key
 func AuthorizedStringForKey(key *PublicKey) string {
 	sb := &strings.Builder{}
-	_ = setting.SSH.AuthorizedKeysCommandTemplateTemplate.Execute(sb, map[string]interface{}{
+	_ = setting.SSH.AuthorizedKeysCommandTemplateTemplate.Execute(sb, map[string]any{
 		"AppPath":     util.ShellEscape(setting.AppPath),
 		"AppWorkPath": util.ShellEscape(setting.AppWorkPath),
 		"CustomConf":  util.ShellEscape(setting.CustomConf),

@@ -175,7 +175,7 @@ func RewriteAllPublicKeys() error {
 
 // RegeneratePublicKeys regenerates the authorized_keys file
 func RegeneratePublicKeys(ctx context.Context, t io.StringWriter) error {
-	if err := db.GetEngine(ctx).Where("type != ?", KeyTypePrincipal).Iterate(new(PublicKey), func(idx int, bean interface{}) (err error) {
+	if err := db.GetEngine(ctx).Where("type != ?", KeyTypePrincipal).Iterate(new(PublicKey), func(idx int, bean any) (err error) {
 		_, err = t.WriteString((bean.(*PublicKey)).AuthorizedString())
 		return err
 	}); err != nil {

@@ -97,7 +97,7 @@ func RewriteAllPrincipalKeys(ctx context.Context) error {
 }
 
 func regeneratePrincipalKeys(ctx context.Context, t io.StringWriter) error {
-	if err := db.GetEngine(ctx).Where("type = ?", KeyTypePrincipal).Iterate(new(PublicKey), func(idx int, bean interface{}) (err error) {
+	if err := db.GetEngine(ctx).Where("type = ?", KeyTypePrincipal).Iterate(new(PublicKey), func(idx int, bean any) (err error) {
 		_, err = t.WriteString((bean.(*PublicKey)).AuthorizedString())
 		return err
 	}); err != nil {

@@ -52,7 +52,7 @@ func (ctx *Context) Engine() Engine {
 }
 
 // Value shadows Value for context.Context but allows us to get ourselves and an Engined object
-func (ctx *Context) Value(key interface{}) interface{} {
+func (ctx *Context) Value(key any) any {
 	if key == enginedContextKey {
 		return ctx
 	}
@@ -163,28 +163,28 @@ func txWithNoCheck(parentCtx context.Context, f func(ctx context.Context) error)
 }
 
 // Insert inserts records into database
-func Insert(ctx context.Context, beans ...interface{}) error {
+func Insert(ctx context.Context, beans ...any) error {
 	_, err := GetEngine(ctx).Insert(beans...)
 	return err
 }
 
 // Exec executes a sql with args
-func Exec(ctx context.Context, sqlAndArgs ...interface{}) (sql.Result, error) {
+func Exec(ctx context.Context, sqlAndArgs ...any) (sql.Result, error) {
 	return GetEngine(ctx).Exec(sqlAndArgs...)
 }
 
 // GetByBean filled empty fields of the bean according non-empty fields to query in database.
-func GetByBean(ctx context.Context, bean interface{}) (bool, error) {
+func GetByBean(ctx context.Context, bean any) (bool, error) {
 	return GetEngine(ctx).Get(bean)
 }
 
 // DeleteByBean deletes all records according non-empty fields of the bean as conditions.
-func DeleteByBean(ctx context.Context, bean interface{}) (int64, error) {
+func DeleteByBean(ctx context.Context, bean any) (int64, error) {
 	return GetEngine(ctx).Delete(bean)
 }
 
 // DeleteByID deletes the given bean with the given ID
-func DeleteByID(ctx context.Context, id int64, bean interface{}) (int64, error) {
+func DeleteByID(ctx context.Context, id int64, bean any) (int64, error) {
 	return GetEngine(ctx).ID(id).NoAutoTime().Delete(bean)
 }

@@ -203,13 +203,13 @@ func FindIDs(ctx context.Context, tableName, idCol string, cond builder.Cond) ([
 
 // DecrByIDs decreases the given column for entities of the "bean" type with one of the given ids by one
 // Timestamps of the entities won't be updated
-func DecrByIDs(ctx context.Context, ids []int64, decrCol string, bean interface{}) error {
+func DecrByIDs(ctx context.Context, ids []int64, decrCol string, bean any) error {
 	_, err := GetEngine(ctx).Decr(decrCol).In("id", ids).NoAutoCondition().NoAutoTime().Update(bean)
 	return err
 }
 
 // DeleteBeans deletes all given beans, beans must contain delete conditions.
-func DeleteBeans(ctx context.Context, beans ...interface{}) (err error) {
+func DeleteBeans(ctx context.Context, beans ...any) (err error) {
 	e := GetEngine(ctx)
 	for i := range beans {
 		if _, err = e.Delete(beans[i]); err != nil {

@@ -220,7 +220,7 @@ func DeleteBeans(ctx context.Context, beans ...interface{}) (err error) {
 }
 
 // TruncateBeans deletes all given beans, beans may contain delete conditions.
-func TruncateBeans(ctx context.Context, beans ...interface{}) (err error) {
+func TruncateBeans(ctx context.Context, beans ...any) (err error) {
 	e := GetEngine(ctx)
 	for i := range beans {
 		if _, err = e.Truncate(beans[i]); err != nil {

@@ -231,12 +231,12 @@ func TruncateBeans(ctx context.Context, beans ...interface{}) (err error) {
 }
 
 // CountByBean counts the number of database records according non-empty fields of the bean as conditions.
-func CountByBean(ctx context.Context, bean interface{}) (int64, error) {
+func CountByBean(ctx context.Context, bean any) (int64, error) {
 	return GetEngine(ctx).Count(bean)
 }
 
 // TableName returns the table name according a bean object
-func TableName(bean interface{}) string {
+func TableName(bean any) string {
 	return x.TableName(bean)
 }
@@ -25,7 +25,7 @@ import (
 
 var (
 	x         *xorm.Engine
-	tables    []interface{}
+	tables    []any
 	initFuncs []func() error
 
 	// HasEngine specifies if we have a xorm.Engine

@@ -34,41 +34,41 @@ var (
 
 // Engine represents a xorm engine or session.
 type Engine interface {
-	Table(tableNameOrBean interface{}) *xorm.Session
-	Count(...interface{}) (int64, error)
-	Decr(column string, arg ...interface{}) *xorm.Session
-	Delete(...interface{}) (int64, error)
-	Truncate(...interface{}) (int64, error)
-	Exec(...interface{}) (sql.Result, error)
-	Find(interface{}, ...interface{}) error
-	Get(beans ...interface{}) (bool, error)
-	ID(interface{}) *xorm.Session
-	In(string, ...interface{}) *xorm.Session
-	Incr(column string, arg ...interface{}) *xorm.Session
-	Insert(...interface{}) (int64, error)
-	Iterate(interface{}, xorm.IterFunc) error
-	Join(joinOperator string, tablename, condition interface{}, args ...interface{}) *xorm.Session
-	SQL(interface{}, ...interface{}) *xorm.Session
-	Where(interface{}, ...interface{}) *xorm.Session
+	Table(tableNameOrBean any) *xorm.Session
+	Count(...any) (int64, error)
+	Decr(column string, arg ...any) *xorm.Session
+	Delete(...any) (int64, error)
+	Truncate(...any) (int64, error)
+	Exec(...any) (sql.Result, error)
+	Find(any, ...any) error
+	Get(beans ...any) (bool, error)
+	ID(any) *xorm.Session
+	In(string, ...any) *xorm.Session
+	Incr(column string, arg ...any) *xorm.Session
+	Insert(...any) (int64, error)
+	Iterate(any, xorm.IterFunc) error
+	Join(joinOperator string, tablename, condition any, args ...any) *xorm.Session
+	SQL(any, ...any) *xorm.Session
+	Where(any, ...any) *xorm.Session
 	Asc(colNames ...string) *xorm.Session
 	Desc(colNames ...string) *xorm.Session
 	Limit(limit int, start ...int) *xorm.Session
 	NoAutoTime() *xorm.Session
-	SumInt(bean interface{}, columnName string) (res int64, err error)
-	Sync2(...interface{}) error
+	SumInt(bean any, columnName string) (res int64, err error)
+	Sync2(...any) error
 	Select(string) *xorm.Session
-	NotIn(string, ...interface{}) *xorm.Session
-	OrderBy(interface{}, ...interface{}) *xorm.Session
-	Exist(...interface{}) (bool, error)
+	NotIn(string, ...any) *xorm.Session
+	OrderBy(any, ...any) *xorm.Session
+	Exist(...any) (bool, error)
 	Distinct(...string) *xorm.Session
-	Query(...interface{}) ([]map[string][]byte, error)
+	Query(...any) ([]map[string][]byte, error)
 	Cols(...string) *xorm.Session
 	Context(ctx context.Context) *xorm.Session
 	Ping() error
 }
 
 // TableInfo returns table's information via an object
-func TableInfo(v interface{}) (*schemas.Table, error) {
+func TableInfo(v any) (*schemas.Table, error) {
 	return x.TableInfo(v)
 }

@@ -78,7 +78,7 @@ func DumpTables(tables []*schemas.Table, w io.Writer, tp ...schemas.DBType) erro
 }
 
 // RegisterModel registers model, if initfunc provided, it will be invoked after data model sync
-func RegisterModel(bean interface{}, initFunc ...func() error) {
+func RegisterModel(bean any, initFunc ...func() error) {
 	tables = append(tables, bean)
 	if len(initFuncs) > 0 && initFunc[0] != nil {
 		initFuncs = append(initFuncs, initFunc[0])

@@ -209,14 +209,14 @@ func InitEngineWithMigration(ctx context.Context, migrateFunc func(*xorm.Engine)
 }
 
 // NamesToBean return a list of beans or an error
-func NamesToBean(names ...string) ([]interface{}, error) {
-	beans := []interface{}{}
+func NamesToBean(names ...string) ([]any, error) {
+	beans := []any{}
 	if len(names) == 0 {
 		beans = append(beans, tables...)
 		return beans, nil
 	}
 	// Need to map provided names to beans...
-	beanMap := make(map[string]interface{})
+	beanMap := make(map[string]any)
 	for _, bean := range tables {
 
 		beanMap[strings.ToLower(reflect.Indirect(reflect.ValueOf(bean)).Type().Name())] = bean

@@ -224,7 +224,7 @@ func NamesToBean(names ...string) ([]interface{}, error) {
 		beanMap[strings.ToLower(x.TableName(bean, true))] = bean
 	}
 
-	gotBean := make(map[interface{}]bool)
+	gotBean := make(map[any]bool)
 	for _, name := range names {
 		bean, ok := beanMap[strings.ToLower(strings.TrimSpace(name))]
 		if !ok {

@@ -266,7 +266,7 @@ func DumpDatabase(filePath, dbType string) error {
 }
 
 // MaxBatchInsertSize returns the table's max batch insert size
-func MaxBatchInsertSize(bean interface{}) int {
+func MaxBatchInsertSize(bean any) int {
 	t, err := x.TableInfo(bean)
 	if err != nil {
 		return 50

@@ -286,7 +286,7 @@ func DeleteAllRecords(tableName string) error {
 }
 
 // GetMaxID will return max id of the table
-func GetMaxID(beanOrTableName interface{}) (maxID int64, err error) {
+func GetMaxID(beanOrTableName any) (maxID int64, err error) {
 	_, err = x.Select("MAX(id)").Table(beanOrTableName).Get(&maxID)
 	return maxID, err
 }
@@ -25,7 +25,7 @@ func (err ErrCancelled) Error() string {
 }
 
 // ErrCancelledf returns an ErrCancelled for the provided format and args
-func ErrCancelledf(format string, args ...interface{}) error {
+func ErrCancelledf(format string, args ...any) error {
 	return ErrCancelled{
 		fmt.Sprintf(format, args...),
 	}

@@ -28,47 +28,47 @@ func NewXORMLogger(showSQL bool) xormlog.Logger {
 const stackLevel = 8
 
 // Log a message with defined skip and at logging level
-func (l *XORMLogBridge) Log(skip int, level log.Level, format string, v ...interface{}) {
+func (l *XORMLogBridge) Log(skip int, level log.Level, format string, v ...any) {
 	l.logger.Log(skip+1, level, format, v...)
 }
 
 // Debug show debug log
-func (l *XORMLogBridge) Debug(v ...interface{}) {
+func (l *XORMLogBridge) Debug(v ...any) {
 	l.Log(stackLevel, log.DEBUG, "%s", fmt.Sprint(v...))
 }
 
 // Debugf show debug log
-func (l *XORMLogBridge) Debugf(format string, v ...interface{}) {
+func (l *XORMLogBridge) Debugf(format string, v ...any) {
 	l.Log(stackLevel, log.DEBUG, format, v...)
 }
 
 // Error show error log
-func (l *XORMLogBridge) Error(v ...interface{}) {
+func (l *XORMLogBridge) Error(v ...any) {
 	l.Log(stackLevel, log.ERROR, "%s", fmt.Sprint(v...))
 }
 
 // Errorf show error log
-func (l *XORMLogBridge) Errorf(format string, v ...interface{}) {
+func (l *XORMLogBridge) Errorf(format string, v ...any) {
 	l.Log(stackLevel, log.ERROR, format, v...)
 }
 
 // Info show information level log
-func (l *XORMLogBridge) Info(v ...interface{}) {
+func (l *XORMLogBridge) Info(v ...any) {
 	l.Log(stackLevel, log.INFO, "%s", fmt.Sprint(v...))
 }
 
 // Infof show information level log
-func (l *XORMLogBridge) Infof(format string, v ...interface{}) {
+func (l *XORMLogBridge) Infof(format string, v ...any) {
 	l.Log(stackLevel, log.INFO, format, v...)
 }
 
 // Warn show warning log
-func (l *XORMLogBridge) Warn(v ...interface{}) {
+func (l *XORMLogBridge) Warn(v ...any) {
 	l.Log(stackLevel, log.WARN, "%s", fmt.Sprint(v...))
 }
 
 // Warnf show warnning log
-func (l *XORMLogBridge) Warnf(format string, v ...interface{}) {
+func (l *XORMLogBridge) Warnf(format string, v ...any) {
 	l.Log(stackLevel, log.WARN, format, v...)
 }
@@ -172,7 +172,7 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, from, to str
 	// 3. Update all not merged pull request base branch name
 	_, err = sess.Table("pull_request").Where("base_repo_id=? AND base_branch=? AND has_merged=?",
 		repo.ID, from, false).
-		Update(map[string]interface{}{"base_branch": to})
+		Update(map[string]any{"base_branch": to})
 	if err != nil {
 		return err
 	}

@@ -264,7 +264,7 @@ func LFSAutoAssociate(ctx context.Context, metas []*LFSMetaObject, user *user_mo
 
 	sess := db.GetEngine(ctx)
 
-	oids := make([]interface{}, len(metas))
+	oids := make([]any, len(metas))
 	oidMap := make(map[string]*LFSMetaObject, len(metas))
 	for i, meta := range metas {
 		oids[i] = meta.Oid

@@ -1131,7 +1131,7 @@ func DeleteComment(ctx context.Context, comment *Comment) error {
 	}
 	if _, err := e.Table("action").
 		Where("comment_id = ?", comment.ID).
-		Update(map[string]interface{}{
+		Update(map[string]any{
 			"is_deleted": true,
 		}); err != nil {
 		return err

@@ -1156,7 +1156,7 @@ func UpdateCommentsMigrationsByType(tp structs.GitServiceType, originalAuthorID
 		}),
 	)).
 		And("comment.original_author_id = ?", originalAuthorID).
-		Update(map[string]interface{}{
+		Update(map[string]any{
 			"poster_id":          posterID,
 			"original_author":    "",
 			"original_author_id": 0,
@@ -714,7 +714,7 @@ func (issue *Issue) Pin(ctx context.Context, user *user_model.User) error {
 
 	_, err = db.GetEngine(ctx).Table("issue").
 		Where("id = ?", issue.ID).
-		Update(map[string]interface{}{
+		Update(map[string]any{
 			"pin_order": maxPin + 1,
 		})
 	if err != nil {

@@ -750,7 +750,7 @@ func (issue *Issue) Unpin(ctx context.Context, user *user_model.User) error {
 
 	_, err = db.GetEngine(ctx).Table("issue").
 		Where("id = ?", issue.ID).
-		Update(map[string]interface{}{
+		Update(map[string]any{
 			"pin_order": 0,
 		})
 	if err != nil {

@@ -822,7 +822,7 @@ func (issue *Issue) MovePin(ctx context.Context, newPosition int) error {
 
 	_, err = db.GetEngine(dbctx).Table("issue").
 		Where("id = ?", issue.ID).
-		Update(map[string]interface{}{
+		Update(map[string]any{
 			"pin_order": newPosition,
 		})
 	if err != nil {

@@ -511,7 +511,7 @@ func UpdateIssueDeadline(issue *Issue, deadlineUnix timeutil.TimeStamp, doer *us
 }
 
 // DeleteInIssue delete records in beans with external key issue_id = ?
-func DeleteInIssue(ctx context.Context, issueID int64, beans ...interface{}) error {
+func DeleteInIssue(ctx context.Context, issueID int64, beans ...any) error {
 	e := db.GetEngine(ctx)
 	for _, bean := range beans {
 		if _, err := e.In("issue_id", issueID).Delete(bean); err != nil {

@@ -673,7 +673,7 @@ func UpdateIssuesMigrationsByType(gitServiceType api.GitServiceType, originalAut
 	_, err := db.GetEngine(db.DefaultContext).Table("issue").
 		Where("repo_id IN (SELECT id FROM repository WHERE original_service_type = ?)", gitServiceType).
 		And("original_author_id = ?", originalAuthorID).
-		Update(map[string]interface{}{
+		Update(map[string]any{
 			"poster_id":          posterID,
 			"original_author":    "",
 			"original_author_id": 0,

@@ -686,7 +686,7 @@ func UpdateReactionsMigrationsByType(gitServiceType api.GitServiceType, original
 	_, err := db.GetEngine(db.DefaultContext).Table("reaction").
 		Where("original_author_id = ?", originalAuthorID).
 		And(migratedIssueCond(gitServiceType)).
-		Update(map[string]interface{}{
+		Update(map[string]any{
 			"user_id":            userID,
 			"original_author":    "",
 			"original_author_id": 0,
@@ -1090,7 +1090,7 @@ func UpdateReviewsMigrationsByType(tp structs.GitServiceType, originalAuthorID s
 	_, err := db.GetEngine(db.DefaultContext).Table("review").
 		Where("original_author_id = ?", originalAuthorID).
 		And(migratedIssueCond(tp)).
-		Update(map[string]interface{}{
+		Update(map[string]any{
 			"reviewer_id":        posterID,
 			"original_author":    "",
 			"original_author_id": 0,

@@ -27,7 +27,7 @@ import (
 
 // RecreateTables will recreate the tables for the provided beans using the newly provided bean definition and move all data to that new table
 // WARNING: YOU MUST PROVIDE THE FULL BEAN DEFINITION
-func RecreateTables(beans ...interface{}) func(*xorm.Engine) error {
+func RecreateTables(beans ...any) func(*xorm.Engine) error {
 	return func(x *xorm.Engine) error {
 		sess := x.NewSession()
 		defer sess.Close()

@@ -48,7 +48,7 @@ func RecreateTables(beans ...interface{}) func(*xorm.Engine) error {
 // RecreateTable will recreate the table using the newly provided bean definition and move all data to that new table
 // WARNING: YOU MUST PROVIDE THE FULL BEAN DEFINITION
 // WARNING: YOU MUST COMMIT THE SESSION AT THE END
-func RecreateTable(sess *xorm.Session, bean interface{}) error {
+func RecreateTable(sess *xorm.Session, bean any) error {
 	// TODO: This will not work if there are foreign keys
 
 	tableName := sess.Engine().TableName(bean)

@@ -30,7 +30,7 @@ import (
 // Provide models to be sync'd with the database - in particular any models you expect fixtures to be loaded from.
 //
 // fixtures in `models/migrations/fixtures/<TestName>` will be loaded automatically
-func PrepareTestEnv(t *testing.T, skip int, syncModels ...interface{}) (*xorm.Engine, func()) {
+func PrepareTestEnv(t *testing.T, skip int, syncModels ...any) (*xorm.Engine, func()) {
 	t.Helper()
 	ourSkip := 2
 	ourSkip += skip
@@ -59,11 +59,11 @@ func UpdateMigrationServiceTypes(x *xorm.Engine) error {
 }
 
 type ExternalLoginUser struct {
-	ExternalID    string                 `xorm:"pk NOT NULL"`
-	UserID        int64                  `xorm:"INDEX NOT NULL"`
-	LoginSourceID int64                  `xorm:"pk NOT NULL"`
-	RawData       map[string]interface{} `xorm:"TEXT JSON"`
-	Provider      string                 `xorm:"index VARCHAR(25)"`
+	ExternalID    string         `xorm:"pk NOT NULL"`
+	UserID        int64          `xorm:"INDEX NOT NULL"`
+	LoginSourceID int64          `xorm:"pk NOT NULL"`
+	RawData       map[string]any `xorm:"TEXT JSON"`
+	Provider      string         `xorm:"index VARCHAR(25)"`
 	Email         string
 	Name          string
 	FirstName     string

@@ -14,7 +14,7 @@ import (
 )
 
 func UnwrapLDAPSourceCfg(x *xorm.Engine) error {
-	jsonUnmarshalHandleDoubleEncode := func(bs []byte, v interface{}) error {
+	jsonUnmarshalHandleDoubleEncode := func(bs []byte, v any) error {
 		err := json.Unmarshal(bs, v)
 		if err != nil {
 			ok := true

@@ -54,7 +54,7 @@ func UnwrapLDAPSourceCfg(x *xorm.Engine) error {
 	const dldapType = 5
 
 	type WrappedSource struct {
-		Source map[string]interface{}
+		Source map[string]any
 	}
 
 	// change lower_email as unique

@@ -77,7 +77,7 @@ func UnwrapLDAPSourceCfg(x *xorm.Engine) error {
 
 	for _, source := range sources {
 		wrapped := &WrappedSource{
-			Source: map[string]interface{}{},
+			Source: map[string]any{},
 		}
 		err := jsonUnmarshalHandleDoubleEncode([]byte(source.Cfg), &wrapped)
 		if err != nil {
@@ -62,8 +62,8 @@ func Test_UnwrapLDAPSourceCfg(t *testing.T) {
 	}
 
 	for _, source := range sources {
-		converted := map[string]interface{}{}
-		expected := map[string]interface{}{}
+		converted := map[string]any{}
+		expected := map[string]any{}
 
 		if err := json.Unmarshal([]byte(source.Cfg), &converted); err != nil {
 			assert.NoError(t, err)

@@ -79,7 +79,7 @@ func Test_AddHeaderAuthorizationEncryptedColWebhook(t *testing.T) {
 		return
 	}
 	for _, h := range hookTasks {
-		var m map[string]interface{}
+		var m map[string]any
 		err := json.Unmarshal([]byte(h.PayloadContent), &m)
 		assert.NoError(t, err)
 		assert.Nil(t, m["access_token"])
@@ -81,11 +81,11 @@ func AddIssueDependencies(x *xorm.Engine) (err error) {
 	// RepoUnit describes all units of a repository
 	type RepoUnit struct {
 		ID          int64
-		RepoID      int64                  `xorm:"INDEX(s)"`
-		Type        int                    `xorm:"INDEX(s)"`
-		Config      map[string]interface{} `xorm:"JSON"`
-		CreatedUnix int64                  `xorm:"INDEX CREATED"`
-		Created     time.Time              `xorm:"-"`
+		RepoID      int64          `xorm:"INDEX(s)"`
+		Type        int            `xorm:"INDEX(s)"`
+		Config      map[string]any `xorm:"JSON"`
+		CreatedUnix int64          `xorm:"INDEX CREATED"`
+		Created     time.Time      `xorm:"-"`
 	}
 
 	// Updating existing issue units

@@ -96,7 +96,7 @@ func AddIssueDependencies(x *xorm.Engine) (err error) {
 	}
 	for _, unit := range units {
 		if unit.Config == nil {
-			unit.Config = make(map[string]interface{})
+			unit.Config = make(map[string]any)
 		}
 		if _, ok := unit.Config["EnableDependencies"]; !ok {
 			unit.Config["EnableDependencies"] = setting.Service.DefaultEnableDependencies

@@ -15,10 +15,10 @@ func AddPullRequestRebaseWithMerge(x *xorm.Engine) error {
 	// RepoUnit describes all units of a repository
 	type RepoUnit struct {
 		ID          int64
-		RepoID      int64                  `xorm:"INDEX(s)"`
-		Type        int                    `xorm:"INDEX(s)"`
-		Config      map[string]interface{} `xorm:"JSON"`
-		CreatedUnix timeutil.TimeStamp     `xorm:"INDEX CREATED"`
+		RepoID      int64              `xorm:"INDEX(s)"`
+		Type        int                `xorm:"INDEX(s)"`
+		Config      map[string]any     `xorm:"JSON"`
+		CreatedUnix timeutil.TimeStamp `xorm:"INDEX CREATED"`
 	}
 
 	const (

@@ -46,7 +46,7 @@ func AddPullRequestRebaseWithMerge(x *xorm.Engine) error {
 	}
 	for _, unit := range units {
 		if unit.Config == nil {
-			unit.Config = make(map[string]interface{})
+			unit.Config = make(map[string]any)
 		}
 		// Allow the new merge style if all other merge styles are allowed
 		allowMergeRebase := true
@@ -59,7 +59,7 @@ type PackageDescriptor struct {
 	Creator           *user_model.User
 	PackageProperties PackagePropertyList
 	VersionProperties PackagePropertyList
-	Metadata          interface{}
+	Metadata          any
 	Files             []*PackageFileDescriptor
 }

@@ -136,7 +136,7 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc
 		return nil, err
 	}
 
-	var metadata interface{}
+	var metadata any
 	switch p.Type {
 	case TypeAlpine:
 		metadata = &alpine.VersionMetadata{}

@@ -456,7 +456,7 @@ func repoStatsCorrectNumClosedPulls(ctx context.Context, id int64) error {
 	return repo_model.UpdateRepoIssueNumbers(ctx, id, true, true)
 }
 
-func statsQuery(args ...interface{}) func(context.Context) ([]map[string][]byte, error) {
+func statsQuery(args ...any) func(context.Context) ([]map[string][]byte, error) {
 	return func(ctx context.Context) ([]map[string][]byte, error) {
 		return db.GetEngine(ctx).Query(args...)
 	}
@@ -105,7 +105,7 @@ func DeleteMirrorByRepoID(repoID int64) error {
 }
 
 // MirrorsIterate iterates all mirror repositories.
-func MirrorsIterate(limit int, f func(idx int, bean interface{}) error) error {
+func MirrorsIterate(limit int, f func(idx int, bean any) error) error {
 	sess := db.GetEngine(db.DefaultContext).
 		Where("next_update_unix<=?", time.Now().Unix()).
 		And("next_update_unix!=0").

@@ -127,7 +127,7 @@ func GetPushMirrorsSyncedOnCommit(ctx context.Context, repoID int64) ([]*PushMir
 }
 
 // PushMirrorsIterate iterates all push-mirror repositories.
-func PushMirrorsIterate(ctx context.Context, limit int, f func(idx int, bean interface{}) error) error {
+func PushMirrorsIterate(ctx context.Context, limit int, f func(idx int, bean any) error) error {
 	sess := db.GetEngine(ctx).
 		Where("last_update + (`interval` / ?) <= ?", time.Second, time.Now().Unix()).
 		And("`interval` != 0").

@@ -41,7 +41,7 @@ func TestPushMirrorsIterate(t *testing.T) {
 
 	time.Sleep(1 * time.Millisecond)
 
-	repo_model.PushMirrorsIterate(db.DefaultContext, 1, func(idx int, bean interface{}) error {
+	repo_model.PushMirrorsIterate(db.DefaultContext, 1, func(idx int, bean any) error {
 		m, ok := bean.(*repo_model.PushMirror)
 		assert.True(t, ok)
 		assert.Equal(t, "test-1", m.RemoteName)
@@ -442,7 +442,7 @@ func UpdateReleasesMigrationsByType(gitServiceType structs.GitServiceType, origi
 	_, err := db.GetEngine(db.DefaultContext).Table("release").
 		Where("repo_id IN (SELECT id FROM repository WHERE original_service_type = ?)", gitServiceType).
 		And("original_author_id = ?", originalAuthorID).
-		Update(map[string]interface{}{
+		Update(map[string]any{
 			"publisher_id":       posterID,
 			"original_author":    "",
 			"original_author_id": 0,

@@ -560,7 +560,7 @@ func searchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, c
 		opts.OrderBy = db.SearchOrderByAlphabetically
 	}
 
-	args := make([]interface{}, 0)
+	args := make([]any, 0)
 	if opts.PriorityOwnerID > 0 {
 		opts.OrderBy = db.SearchOrderBy(fmt.Sprintf("CASE WHEN owner_id = ? THEN 0 ELSE owner_id END, %s", opts.OrderBy))
 		args = append(args, opts.PriorityOwnerID)

@@ -43,7 +43,7 @@ func (n *Notice) TrStr() string {
 }
 
 // CreateNotice creates new system notice.
-func CreateNotice(ctx context.Context, tp NoticeType, desc string, args ...interface{}) error {
+func CreateNotice(ctx context.Context, tp NoticeType, desc string, args ...any) error {
 	if len(args) > 0 {
 		desc = fmt.Sprintf(desc, args...)
 	}

@@ -55,7 +55,7 @@ func CreateNotice(ctx context.Context, tp NoticeType, desc string, args ...inter
 }
 
 // CreateRepositoryNotice creates new system notice with type NoticeRepository.
-func CreateRepositoryNotice(desc string, args ...interface{}) error {
+func CreateRepositoryNotice(desc string, args ...any) error {
 	// Note we use the db.DefaultContext here rather than passing in a context as the context may be cancelled
 	return CreateNotice(db.DefaultContext, NoticeRepository, desc, args...)
 }
@@ -21,10 +21,10 @@ const (
 	modelsCommentTypeComment = 0
 )
 
-var consistencyCheckMap = make(map[string]func(t assert.TestingT, bean interface{}))
+var consistencyCheckMap = make(map[string]func(t assert.TestingT, bean any))
 
 // CheckConsistencyFor test that all matching database entries are consistent
-func CheckConsistencyFor(t assert.TestingT, beansToCheck ...interface{}) {
+func CheckConsistencyFor(t assert.TestingT, beansToCheck ...any) {
 	for _, bean := range beansToCheck {
 		sliceType := reflect.SliceOf(reflect.TypeOf(bean))
 		sliceValue := reflect.MakeSlice(sliceType, 0, 10)

@@ -42,7 +42,7 @@ func CheckConsistencyFor(t assert.TestingT, beansToCheck ...interface{}) {
 	}
 }
 
-func checkForConsistency(t assert.TestingT, bean interface{}) {
+func checkForConsistency(t assert.TestingT, bean any) {
 	tb, err := db.TableInfo(bean)
 	assert.NoError(t, err)
 	f := consistencyCheckMap[tb.Name]

@@ -63,7 +63,7 @@ func init() {
 		return i
 	}
 
-	checkForUserConsistency := func(t assert.TestingT, bean interface{}) {
+	checkForUserConsistency := func(t assert.TestingT, bean any) {
 		user := reflectionWrap(bean)
 		AssertCountByCond(t, "repository", builder.Eq{"owner_id": user.int("ID")}, user.int("NumRepos"))
 		AssertCountByCond(t, "star", builder.Eq{"uid": user.int("ID")}, user.int("NumStars"))

@@ -77,7 +77,7 @@ func init() {
 		}
 	}
 
-	checkForRepoConsistency := func(t assert.TestingT, bean interface{}) {
+	checkForRepoConsistency := func(t assert.TestingT, bean any) {
 		repo := reflectionWrap(bean)
 		assert.Equal(t, repo.str("LowerName"), strings.ToLower(repo.str("Name")), "repo: %+v", repo)
 		AssertCountByCond(t, "star", builder.Eq{"repo_id": repo.int("ID")}, repo.int("NumStars"))

@@ -113,7 +113,7 @@ func init() {
 			"Unexpected number of closed milestones for repo id: %d", repo.int("ID"))
 	}
 
-	checkForIssueConsistency := func(t assert.TestingT, bean interface{}) {
+	checkForIssueConsistency := func(t assert.TestingT, bean any) {
 		issue := reflectionWrap(bean)
 		typeComment := modelsCommentTypeComment
 		actual := GetCountByCond(t, "comment", builder.Eq{"`type`": typeComment, "issue_id": issue.int("ID")})

@@ -124,14 +124,14 @@ func init() {
 		}
 	}
 
-	checkForPullRequestConsistency := func(t assert.TestingT, bean interface{}) {
+	checkForPullRequestConsistency := func(t assert.TestingT, bean any) {
 		pr := reflectionWrap(bean)
 		issueRow := AssertExistsAndLoadMap(t, "issue", builder.Eq{"id": pr.int("IssueID")})
 		assert.True(t, parseBool(issueRow["is_pull"]))
 		assert.EqualValues(t, parseInt(issueRow["index"]), pr.int("Index"), "Unexpected index for pull request id: %d", pr.int("ID"))
 	}
 
-	checkForMilestoneConsistency := func(t assert.TestingT, bean interface{}) {
+	checkForMilestoneConsistency := func(t assert.TestingT, bean any) {
 		milestone := reflectionWrap(bean)
 		AssertCountByCond(t, "issue", builder.Eq{"milestone_id": milestone.int("ID")}, milestone.int("NumIssues"))

@@ -145,7 +145,7 @@ func init() {
 		assert.Equal(t, completeness, milestone.int("Completeness"))
 	}
 
-	checkForLabelConsistency := func(t assert.TestingT, bean interface{}) {
+	checkForLabelConsistency := func(t assert.TestingT, bean any) {
 		label := reflectionWrap(bean)
 		issueLabels, err := db.GetEngine(db.DefaultContext).Table("issue_label").
 			Where(builder.Eq{"label_id": label.int("ID")}).

@@ -166,13 +166,13 @@ func init() {
 		assert.EqualValues(t, expected, label.int("NumClosedIssues"), "Unexpected number of closed issues for label id: %d", label.int("ID"))
 	}
 
-	checkForTeamConsistency := func(t assert.TestingT, bean interface{}) {
+	checkForTeamConsistency := func(t assert.TestingT, bean any) {
 		team := reflectionWrap(bean)
 		AssertCountByCond(t, "team_user", builder.Eq{"team_id": team.int("ID")}, team.int("NumMembers"))
 		AssertCountByCond(t, "team_repo", builder.Eq{"team_id": team.int("ID")}, team.int("NumRepos"))
 	}
 
-	checkForActionConsistency := func(t assert.TestingT, bean interface{}) {
+	checkForActionConsistency := func(t assert.TestingT, bean any) {
 		action := reflectionWrap(bean)
 		if action.int("RepoID") != 1700 { // dangling intentional
 			repoRow := AssertExistsAndLoadMap(t, "repository", builder.Eq{"id": action.int("RepoID")})
@@ -23,7 +23,7 @@ type reflectionValue struct {
 	v reflect.Value
 }
 
-func reflectionWrap(v interface{}) *reflectionValue {
+func reflectionWrap(v any) *reflectionValue {
 	return &reflectionValue{v: reflect.ValueOf(v)}
 }

@@ -37,7 +37,7 @@ func FixturesDir() string {
 	return fixturesDir
 }
 
-func fatalTestError(fmtStr string, args ...interface{}) {
+func fatalTestError(fmtStr string, args ...any) {
 	_, _ = fmt.Fprintf(os.Stderr, fmtStr, args...)
 	os.Exit(1)
 }

@@ -57,11 +57,11 @@ func (err ErrExternalLoginUserNotExist) Unwrap() error {
 
 // ExternalLoginUser makes the connecting between some existing user and additional external login sources
 type ExternalLoginUser struct {
-	ExternalID    string                 `xorm:"pk NOT NULL"`
-	UserID        int64                  `xorm:"INDEX NOT NULL"`
-	LoginSourceID int64                  `xorm:"pk NOT NULL"`
-	RawData       map[string]interface{} `xorm:"TEXT JSON"`
-	Provider      string                 `xorm:"index VARCHAR(25)"`
+	ExternalID    string         `xorm:"pk NOT NULL"`
+	UserID        int64          `xorm:"INDEX NOT NULL"`
+	LoginSourceID int64          `xorm:"pk NOT NULL"`
+	RawData       map[string]any `xorm:"TEXT JSON"`
+	Provider      string         `xorm:"index VARCHAR(25)"`
 	Email         string
 	Name          string
 	FirstName     string
@@ -92,7 +92,7 @@ func (t *HookTask) AfterLoad() {
 	}
 }
 
-func (t *HookTask) simpleMarshalJSON(v interface{}) string {
+func (t *HookTask) simpleMarshalJSON(v any) string {
 	p, err := json.Marshal(v)
 	if err != nil {
 		log.Error("Marshal [%d]: %v", t.ID, err)

@@ -21,7 +21,7 @@ import (
 )
 
 func init() {
-	model.OnDecodeNodeError = func(node yaml.Node, out interface{}, err error) {
+	model.OnDecodeNodeError = func(node yaml.Node, out any, err error) {
 		// Log the error instead of panic or fatal.
 		// It will be a big job to refactor act/pkg/model to return decode error,
 		// so we just log the error and return empty value, and improve it later.

@@ -107,7 +107,7 @@ const TimeLimitCodeLength = 12 + 6 + 40
 
 // CreateTimeLimitCode create a time limit code
 // code format: 12 length date time string + 6 minutes string + 40 sha1 encoded string
-func CreateTimeLimitCode(data string, minutes int, startInf interface{}) string {
+func CreateTimeLimitCode(data string, minutes int, startInf any) string {
 	format := "200601021504"
 
 	var start, end time.Time
@@ -245,7 +245,7 @@ func SetupGiteaRoot() string {
 }
 
 // FormatNumberSI format a number
-func FormatNumberSI(data interface{}) string {
+func FormatNumberSI(data any) string {
 	var num int64
 	if num1, ok := data.(int64); ok {
 		num = num1
modules/cache/cache_redis.go (vendored)
@@ -24,7 +24,7 @@ type RedisCacher struct {
 }
 
 // toStr convert string/int/int64 interface to string. it's only used by the RedisCacher.Put internally
-func toStr(v interface{}) string {
+func toStr(v any) string {
 	if v == nil {
 		return ""
 	}

@@ -44,7 +44,7 @@ func toStr(v interface{}) string {
 
 // Put puts value (string type) into cache with key and expire time.
 // If expired is 0, it lives forever.
-func (c *RedisCacher) Put(key string, val interface{}, expire int64) error {
+func (c *RedisCacher) Put(key string, val any, expire int64) error {
 	// this function is not well-designed, it only puts string values into cache
 	key = c.prefix + key
 	if expire == 0 {

@@ -65,7 +65,7 @@ func (c *RedisCacher) Put(key string, val interface{}, expire int64) error {
 }
 
 // Get gets cached value by given key.
-func (c *RedisCacher) Get(key string) interface{} {
+func (c *RedisCacher) Get(key string) any {
 	val, err := c.c.Get(graceful.GetManager().HammerContext(), c.prefix+key).Result()
 	if err != nil {
 		return nil
modules/cache/cache_twoqueue.go (vendored)
@@ -30,7 +30,7 @@ type TwoQueueCacheConfig struct {
 
 // MemoryItem represents a memory cache item.
 type MemoryItem struct {
-	Val     interface{}
+	Val     any
 	Created int64
 	Timeout int64
 }

@@ -43,7 +43,7 @@ func (item *MemoryItem) hasExpired() bool {
 var _ mc.Cache = &TwoQueueCache{}
 
 // Put puts value into cache with key and expire time.
-func (c *TwoQueueCache) Put(key string, val interface{}, timeout int64) error {
+func (c *TwoQueueCache) Put(key string, val any, timeout int64) error {
 	item := &MemoryItem{
 		Val:     val,
 		Created: time.Now().Unix(),

@@ -56,7 +56,7 @@ func (c *TwoQueueCache) Put(key string, val interface{}, timeout int64) error {
 }
 
 // Get gets cached value by given key.
-func (c *TwoQueueCache) Get(key string) interface{} {
+func (c *TwoQueueCache) Get(key string) any {
 	c.lock.Lock()
 	defer c.lock.Unlock()
 	cached, ok := c.cache.Get(key)

@@ -146,7 +146,7 @@ func (c *TwoQueueCache) Flush() error {
 	return nil
 }
 
-func (c *TwoQueueCache) checkAndInvalidate(key interface{}) {
+func (c *TwoQueueCache) checkAndInvalidate(key any) {
 	c.lock.Lock()
 	defer c.lock.Unlock()
 	cached, ok := c.cache.Peek(key)
@@ -90,7 +90,7 @@ Usage: %[1]s [-v] [-o output.go] ambiguous.json
 	sort.Slice(tables, func(i, j int) bool {
 		return tables[i].Locale < tables[j].Locale
 	})
-	data := map[string]interface{}{
+	data := map[string]any{
 		"Tables": tables,
 	}

@@ -99,7 +99,7 @@ Usage: %[1]s [-v] [-o output.go] ambiguous.json
 	}
 }
 
-func runTemplate(t *template.Template, filename string, data interface{}) error {
+func runTemplate(t *template.Template, filename string, data any) error {
 	buf := bytes.NewBuffer(nil)
 	if err := t.Execute(buf, data); err != nil {
 		return fmt.Errorf("unable to execute template: %w", err)

@@ -172,17 +172,17 @@ var AmbiguousCharacters = map[string]*AmbiguousTable{
 
 `))
 
-func logf(format string, args ...interface{}) {
+func logf(format string, args ...any) {
 	fmt.Fprintf(os.Stderr, format+"\n", args...)
 }
 
-func verbosef(format string, args ...interface{}) {
+func verbosef(format string, args ...any) {
 	if verbose {
 		logf(format, args...)
 	}
 }
 
-func fatalf(format string, args ...interface{}) {
+func fatalf(format string, args ...any) {
 	logf("fatal: "+format+"\n", args...)
 	os.Exit(1)
 }
@@ -52,7 +52,7 @@ Usage: %[1]s [-v] [-o output.go]
 	}
 }
 
-func runTemplate(t *template.Template, filename string, data interface{}) error {
+func runTemplate(t *template.Template, filename string, data any) error {
 	buf := bytes.NewBuffer(nil)
 	if err := t.Execute(buf, data); err != nil {
 		return fmt.Errorf("unable to execute template: %w", err)

@@ -105,17 +105,17 @@ var InvisibleRanges = &unicode.RangeTable{
 }
 `))
 
-func logf(format string, args ...interface{}) {
+func logf(format string, args ...any) {
 	fmt.Fprintf(os.Stderr, format+"\n", args...)
 }
 
-func verbosef(format string, args ...interface{}) {
+func verbosef(format string, args ...any) {
 	if verbose {
 		logf(format, args...)
 	}
}
 
-func fatalf(format string, args ...interface{}) {
+func fatalf(format string, args ...any) {
 	logf("fatal: "+format+"\n", args...)
 	os.Exit(1)
 }
@@ -23,7 +23,7 @@ type routerLoggerOptions struct {
 	Identity       *string
 	Start          *time.Time
 	ResponseWriter http.ResponseWriter
-	Ctx            map[string]interface{}
+	Ctx            map[string]any
 	RequestID      *string
 }

@@ -84,7 +84,7 @@ func AccessLogger() func(http.Handler) http.Handler {
 				Identity:       &identity,
 				Start:          &start,
 				ResponseWriter: rw,
-				Ctx: map[string]interface{}{
+				Ctx: map[string]any{
 					"RemoteAddr": req.RemoteAddr,
 					"RemoteHost": reqHost,
 					"Req":        req,
@@ -100,7 +100,7 @@ func (ctx *APIContext) ServerError(title string, err error) {
 
 // Error responds with an error message to client with given obj as the message.
 // If status is 500, also it prints error to log.
-func (ctx *APIContext) Error(status int, title string, obj interface{}) {
+func (ctx *APIContext) Error(status int, title string, obj any) {
 	var message string
 	if err, ok := obj.(error); ok {
 		message = err.Error()

@@ -257,7 +257,7 @@ func APIContexter() func(http.Handler) http.Handler {
 
 // NotFound handles 404s for APIContext
 // String will replace message, errors will be added to a slice
-func (ctx *APIContext) NotFound(objs ...interface{}) {
+func (ctx *APIContext) NotFound(objs ...any) {
 	message := ctx.Tr("error.not_found")
 	var errors []string
 	for _, obj := range objs {

@@ -273,7 +273,7 @@ func (ctx *APIContext) NotFound(objs ...interface{}) {
 		}
 	}
 
-	ctx.JSON(http.StatusNotFound, map[string]interface{}{
+	ctx.JSON(http.StatusNotFound, map[string]any{
 		"message": message,
 		"url":     setting.API.SwaggerURL,
 		"errors":  errors,
@@ -124,7 +124,7 @@ func (b *Base) Error(status int, contents ...string) {
 }
 
 // JSON render content as JSON
-func (b *Base) JSON(status int, content interface{}) {
+func (b *Base) JSON(status int, content any) {
 	b.Resp.Header().Set("Content-Type", "application/json;charset=utf-8")
 	b.Resp.WriteHeader(status)
 	if err := json.NewEncoder(b.Resp).Encode(content); err != nil {

@@ -60,7 +60,7 @@ const (
 
 // VerifyCaptcha verifies Captcha data
 // No-op if captchas are not enabled
-func VerifyCaptcha(ctx *Context, tpl base.TplName, form interface{}) {
+func VerifyCaptcha(ctx *Context, tpl base.TplName, form any) {
 	if !setting.Service.EnableCaptcha {
 		return
 	}
@@ -30,7 +30,7 @@ import (
 // Render represents a template render
 type Render interface {
 	TemplateLookup(tmpl string) (templates.TemplateExecutor, error)
-	HTML(w io.Writer, status int, name string, data interface{}) error
+	HTML(w io.Writer, status int, name string, data any) error
 }
 
 // Context represents context of a request.

@@ -61,7 +61,7 @@ type Context struct {
 // TrHTMLEscapeArgs runs ".Locale.Tr()" but pre-escapes all arguments with html.EscapeString.
 // This is useful if the locale message is intended to only produce HTML content.
 func (ctx *Context) TrHTMLEscapeArgs(msg string, args ...string) string {
-	trArgs := make([]interface{}, len(args))
+	trArgs := make([]any, len(args))
 	for i, arg := range args {
 		trArgs[i] = html.EscapeString(arg)
 	}

@@ -97,14 +97,14 @@ func (ctx *Context) HTML(status int, name base.TplName) {
 }
 
 // RenderToString renders the template content to a string
-func (ctx *Context) RenderToString(name base.TplName, data map[string]interface{}) (string, error) {
+func (ctx *Context) RenderToString(name base.TplName, data map[string]any) (string, error) {
 	var buf strings.Builder
 	err := ctx.Render.HTML(&buf, http.StatusOK, string(name), data)
 	return buf.String(), err
 }
 
 // RenderWithErr used for page has form validation but need to prompt error to users.
-func (ctx *Context) RenderWithErr(msg string, tpl base.TplName, form interface{}) {
+func (ctx *Context) RenderWithErr(msg string, tpl base.TplName, form any) {
 	if form != nil {
 		middleware.AssignForm(form, ctx.Data)
 	}
@@ -33,7 +33,7 @@ type packageAssignmentCtx struct {
 // PackageAssignment returns a middleware to handle Context.Package assignment
 func PackageAssignment() func(ctx *Context) {
 	return func(ctx *Context) {
-		errorFn := func(status int, title string, obj interface{}) {
+		errorFn := func(status int, title string, obj any) {
 			err, ok := obj.(error)
 			if !ok {
 				err = fmt.Errorf("%s", obj)

@@ -57,7 +57,7 @@ func PackageAssignmentAPI() func(ctx *APIContext) {
 	}
 }
 
-func packageAssignment(ctx *packageAssignmentCtx, errCb func(int, string, interface{})) *Package {
+func packageAssignment(ctx *packageAssignmentCtx, errCb func(int, string, any)) *Package {
 	pkg := &Package{
 		Owner: ctx.ContextUser,
 	}
@@ -32,7 +32,7 @@ func (p *Pagination) AddParam(ctx *Context, paramKey, ctxKey string) {
 	if !exists {
 		return
 	}
-	paramData := fmt.Sprintf("%v", ctx.Data[ctxKey]) // cast interface{} to string
+	paramData := fmt.Sprintf("%v", ctx.Data[ctxKey]) // cast any to string
 	urlParam := fmt.Sprintf("%s=%v", url.QueryEscape(paramKey), url.QueryEscape(paramData))
 	p.urlParams = append(p.urlParams, urlParam)
 }

@@ -90,7 +90,7 @@ func RequireRepoReaderOr(unitTypes ...unit.Type) func(ctx *Context) {
 		}
 		if log.IsTrace() {
 			var format string
-			var args []interface{}
+			var args []any
 			if ctx.IsSigned {
 				format = "Permission Denied: User %-v cannot read ["
 				args = append(args, ctx.Doer)
@@ -45,7 +45,7 @@ func (ctx *PrivateContext) Err() error {
 	return ctx.Base.Err()
 }
 
-var privateContextKey interface{} = "default_private_context"
+var privateContextKey any = "default_private_context"
 
 // GetPrivateContext returns a context for Private routes
 func GetPrivateContext(req *http.Request) *PrivateContext {

@@ -51,8 +51,8 @@ func wrapNewlines(w io.Writer, prefix, value []byte) (sum int64, err error) {
 type Event struct {
 	// Name represents the value of the event: tag in the stream
 	Name string
-	// Data is either JSONified []byte or interface{} that can be JSONd
-	Data interface{}
+	// Data is either JSONified []byte or any that can be JSONd
+	Data any
 	// ID represents the ID of an event
 	ID string
 	// Retry tells the receiver only to attempt to reconnect to the source after this time
@@ -177,7 +177,7 @@ func GetLastCommitForPaths(ctx context.Context, cache *LastCommitCache, c cgobje
 	refSha := c.ID().String()
 
 	// We do a tree traversal with nodes sorted by commit time
-	heap := binaryheap.NewWith(func(a, b interface{}) int {
+	heap := binaryheap.NewWith(func(a, b any) int {
 		if a.(*commitAndPaths).commit.CommitTime().Before(b.(*commitAndPaths).commit.CommitTime()) {
 			return 1
 		}

@@ -217,7 +217,7 @@ func TestParser(t *testing.T) {
 	}
 }
 
-func pretty(v interface{}) string {
+func pretty(v any) string {
 	data, err := json.MarshalIndent(v, "", "  ")
 	if err != nil {
 		// shouldn't happen
@@ -114,7 +114,7 @@ func VersionInfo() string {
 		return "(git not found)"
 	}
 	format := "%s"
-	args := []interface{}{gitVersion.Original()}
+	args := []any{gitVersion.Original()}
 	// Since git wire protocol has been released from git v2.18
 	if setting.Git.EnableAutoGitWireProtocol && CheckGitVersionAtLeast("2.18") == nil {
 		format += ", Wire Protocol %s Enabled"

@@ -15,9 +15,9 @@ import (
 // Cache represents a caching interface
 type Cache interface {
 	// Put puts value into cache with key and expire time.
-	Put(key string, val interface{}, timeout int64) error
+	Put(key string, val any, timeout int64) error
 	// Get gets cached value by given key.
-	Get(key string) interface{}
+	Get(key string) any
 }
 
 func getCacheKey(repoPath, commitID, entryPath string) string {
@@ -15,17 +15,17 @@ import (
 
 // ObjectCache provides thread-safe cache operations.
 type ObjectCache struct {
 	lock  sync.RWMutex
-	cache map[string]interface{}
+	cache map[string]any
 }
 
 func newObjectCache() *ObjectCache {
 	return &ObjectCache{
-		cache: make(map[string]interface{}, 10),
+		cache: make(map[string]any, 10),
 	}
 }
 
 // Set add obj to cache
-func (oc *ObjectCache) Set(id string, obj interface{}) {
+func (oc *ObjectCache) Set(id string, obj any) {
 	oc.lock.Lock()
 	defer oc.lock.Unlock()

@@ -33,7 +33,7 @@ func (oc *ObjectCache) Set(id string, obj interface{}) {
 }
 
 // Get get cached obj by id
-func (oc *ObjectCache) Get(id string) (interface{}, bool) {
+func (oc *ObjectCache) Get(id string) (any, bool) {
 	oc.lock.RLock()
 	defer oc.lock.RUnlock()
@@ -283,7 +283,7 @@ func (g *Manager) Err() error {
 }
 
 // Value allows the manager to be viewed as a context.Context done at Terminate
-func (g *Manager) Value(key interface{}) interface{} {
+func (g *Manager) Value(key any) any {
 	return g.managerCtx.Value(key)
 }

@@ -5,7 +5,7 @@ package html
 
 // ParseSizeAndClass get size and class from string with default values
 // If present, "others" expects the new size first and then the classes to use
-func ParseSizeAndClass(defaultSize int, defaultClass string, others ...interface{}) (int, string) {
+func ParseSizeAndClass(defaultSize int, defaultClass string, others ...any) (int, string) {
 	if len(others) == 0 {
 		return defaultSize, defaultClass
 	}
@@ -101,7 +101,7 @@ func (r *Request) Param(key, value string) *Request {
 
 // Body adds request raw body.
 // it supports string and []byte.
-func (r *Request) Body(data interface{}) *Request {
+func (r *Request) Body(data any) *Request {
 	switch t := data.(type) {
 	case string:
 		bf := bytes.NewBufferString(t)

@@ -27,7 +27,7 @@ func NewFlushingBatch(index bleve.Index, maxBatchSize int) *FlushingBatch {
 }
 
 // Index add a new index to batch
-func (b *FlushingBatch) Index(id string, data interface{}) error {
+func (b *FlushingBatch) Index(id string, data any) error {
 	if err := b.batch.Index(id, data); err != nil {
 		return err
 	}
@@ -53,7 +53,7 @@ func numericEqualityQuery(value int64, field string) *query.NumericRangeQuery {
 }
 
 func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error {
-	return m.AddCustomTokenFilter(unicodeNormalizeName, map[string]interface{}{
+	return m.AddCustomTokenFilter(unicodeNormalizeName, map[string]any{
 		"type": unicodenorm.Name,
 		"form": unicodenorm.NFC,
 	})

@@ -135,7 +135,7 @@ func createBleveIndexer(path string, latestVersion int) (bleve.Index, error) {
 	mapping := bleve.NewIndexMapping()
 	if err := addUnicodeNormalizeTokenFilter(mapping); err != nil {
 		return nil, err
-	} else if err := mapping.AddCustomAnalyzer(repoIndexerAnalyzer, map[string]interface{}{
+	} else if err := mapping.AddCustomAnalyzer(repoIndexerAnalyzer, map[string]any{
 		"type":          analyzer_custom.Name,
 		"char_filters":  []string{},
 		"tokenizer":     unicode.Name,

@ -241,7 +241,7 @@ func (b *ElasticSearchIndexer) addUpdate(ctx context.Context, batchWriter git.Wr
elastic.NewBulkIndexRequest().
Index(b.indexerAliasName).
Id(id).
Doc(map[string]interface{}{
Doc(map[string]any{
"repo_id": repo.ID,
"content": string(charset.ToUTF8DropErrors(fileContents)),
"commit_id": sha,

@ -342,7 +342,7 @@ func convertResult(searchResult *elastic.SearchResult, kw string, pageSize int)
}

repoID, fileName := parseIndexerID(hit.Id)
res := make(map[string]interface{})
res := make(map[string]any)
if err := json.Unmarshal(hit.Source, &res); err != nil {
return 0, nil, nil, err
}

@ -393,7 +393,7 @@ func (b *ElasticSearchIndexer) Search(ctx context.Context, repoIDs []int64, lang
query := elastic.NewBoolQuery()
query = query.Must(kwQuery)
if len(repoIDs) > 0 {
repoStrs := make([]interface{}, 0, len(repoIDs))
repoStrs := make([]any, 0, len(repoIDs))
for _, repoID := range repoIDs {
repoStrs = append(repoStrs, repoID)
}

@ -64,7 +64,7 @@ func newMatchPhraseQuery(matchPhrase, field, analyzer string) *query.MatchPhrase
const unicodeNormalizeName = "unicodeNormalize"

func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error {
return m.AddCustomTokenFilter(unicodeNormalizeName, map[string]interface{}{
return m.AddCustomTokenFilter(unicodeNormalizeName, map[string]any{
"type": unicodenorm.Name,
"form": unicodenorm.NFC,
})

@ -131,7 +131,7 @@ func createIssueIndexer(path string, latestVersion int) (bleve.Index, error) {

if err := addUnicodeNormalizeTokenFilter(mapping); err != nil {
return nil, err
} else if err = mapping.AddCustomAnalyzer(issueIndexerAnalyzer, map[string]interface{}{
} else if err = mapping.AddCustomAnalyzer(issueIndexerAnalyzer, map[string]any{
"type": custom.Name,
"char_filters": []string{},
"tokenizer": unicode.Name,

@ -140,7 +140,7 @@ func (b *ElasticSearchIndexer) Index(issues []*IndexerData) error {
_, err := b.client.Index().
Index(b.indexerName).
Id(fmt.Sprintf("%d", issue.ID)).
BodyJson(map[string]interface{}{
BodyJson(map[string]any{
"id": issue.ID,
"repo_id": issue.RepoID,
"title": issue.Title,

@ -157,7 +157,7 @@ func (b *ElasticSearchIndexer) Index(issues []*IndexerData) error {
elastic.NewBulkIndexRequest().
Index(b.indexerName).
Id(fmt.Sprintf("%d", issue.ID)).
Doc(map[string]interface{}{
Doc(map[string]any{
"id": issue.ID,
"repo_id": issue.RepoID,
"title": issue.Title,

@ -209,7 +209,7 @@ func (b *ElasticSearchIndexer) Search(ctx context.Context, keyword string, repoI
query := elastic.NewBoolQuery()
query = query.Must(kwQuery)
if len(repoIDs) > 0 {
repoStrs := make([]interface{}, 0, len(repoIDs))
repoStrs := make([]any, 0, len(repoIDs))
for _, repoID := range repoIDs {
repoStrs = append(repoStrs, repoID)
}

@ -130,7 +130,7 @@ func (b *MeilisearchIndexer) Search(ctx context.Context, keyword string, repoIDs
hits := make([]Match, 0, len(searchRes.Hits))
for _, hit := range searchRes.Hits {
hits = append(hits, Match{
ID: int64(hit.(map[string]interface{})["id"].(float64)),
ID: int64(hit.(map[string]any)["id"].(float64)),
})
}
return &SearchResult{

@ -151,7 +151,7 @@ func validateOptions(field *api.IssueFormField, idx int) error {
}
position := newErrorPosition(idx, field.Type)

options, ok := field.Attributes["options"].([]interface{})
options, ok := field.Attributes["options"].([]any)
if !ok || len(options) == 0 {
return position.Errorf("'options' is required and should be a array")
}

@ -164,7 +164,7 @@ func validateOptions(field *api.IssueFormField, idx int) error {
return position.Errorf("should be a string")
}
case api.IssueFormFieldTypeCheckboxes:
opt, ok := option.(map[string]interface{})
opt, ok := option.(map[string]any)
if !ok {
return position.Errorf("should be a dictionary")
}

@ -182,7 +182,7 @@ func validateOptions(field *api.IssueFormField, idx int) error {
return nil
}

func validateStringItem(position errorPosition, m map[string]interface{}, required bool, names ...string) error {
func validateStringItem(position errorPosition, m map[string]any, required bool, names ...string) error {
for _, name := range names {
v, ok := m[name]
if !ok {

@ -202,7 +202,7 @@ func validateStringItem(position errorPosition, m map[string]interface{}, requir
return nil
}

func validateBoolItem(position errorPosition, m map[string]interface{}, names ...string) error {
func validateBoolItem(position errorPosition, m map[string]any, names ...string) error {
for _, name := range names {
v, ok := m[name]
if !ok {

@ -217,7 +217,7 @@ func validateBoolItem(position errorPosition, m map[string]interface{}, names ..

type errorPosition string

func (p errorPosition) Errorf(format string, a ...interface{}) error {
func (p errorPosition) Errorf(format string, a ...any) error {
return fmt.Errorf(string(p)+": "+format, a...)
}

@ -332,7 +332,7 @@ func (f *valuedField) Value() string {
}

func (f *valuedField) Options() []*valuedOption {
if options, ok := f.Attributes["options"].([]interface{}); ok {
if options, ok := f.Attributes["options"].([]any); ok {
ret := make([]*valuedOption, 0, len(options))
for i, option := range options {
ret = append(ret, &valuedOption{

@ -348,7 +348,7 @@ func (f *valuedField) Options() []*valuedOption {

type valuedOption struct {
index int
data interface{}
data any
field *valuedField
}

@ -359,7 +359,7 @@ func (o *valuedOption) Label() string {
return label
}
case api.IssueFormFieldTypeCheckboxes:
if vs, ok := o.data.(map[string]interface{}); ok {
if vs, ok := o.data.(map[string]any); ok {
if v, ok := vs["label"].(string); ok {
return v
}

@ -387,34 +387,34 @@ body:
{
Type: "markdown",
ID: "id1",
Attributes: map[string]interface{}{
Attributes: map[string]any{
"value": "Value of the markdown",
},
},
{
Type: "textarea",
ID: "id2",
Attributes: map[string]interface{}{
Attributes: map[string]any{
"label": "Label of textarea",
"description": "Description of textarea",
"placeholder": "Placeholder of textarea",
"value": "Value of textarea",
"render": "bash",
},
Validations: map[string]interface{}{
Validations: map[string]any{
"required": true,
},
},
{
Type: "input",
ID: "id3",
Attributes: map[string]interface{}{
Attributes: map[string]any{
"label": "Label of input",
"description": "Description of input",
"placeholder": "Placeholder of input",
"value": "Value of input",
},
Validations: map[string]interface{}{
Validations: map[string]any{
"required": true,
"is_number": true,
"regex": "[a-zA-Z0-9]+",

@ -423,30 +423,30 @@ body:
{
Type: "dropdown",
ID: "id4",
Attributes: map[string]interface{}{
Attributes: map[string]any{
"label": "Label of dropdown",
"description": "Description of dropdown",
"multiple": true,
"options": []interface{}{
"options": []any{
"Option 1 of dropdown",
"Option 2 of dropdown",
"Option 3 of dropdown",
},
},
Validations: map[string]interface{}{
Validations: map[string]any{
"required": true,
},
},
{
Type: "checkboxes",
ID: "id5",
Attributes: map[string]interface{}{
Attributes: map[string]any{
"label": "Label of checkboxes",
"description": "Description of checkboxes",
"options": []interface{}{
map[string]interface{}{"label": "Option 1 of checkboxes", "required": true},
map[string]interface{}{"label": "Option 2 of checkboxes", "required": false},
map[string]interface{}{"label": "Option 3 of checkboxes", "required": true},
"options": []any{
map[string]any{"label": "Option 1 of checkboxes", "required": true},
map[string]any{"label": "Option 2 of checkboxes", "required": false},
map[string]any{"label": "Option 3 of checkboxes", "required": true},
},
},
},

@ -479,7 +479,7 @@ body:
{
Type: "markdown",
ID: "id1",
Attributes: map[string]interface{}{
Attributes: map[string]any{
"value": "Value of the markdown",
},
},

@ -512,7 +512,7 @@ body:
{
Type: "markdown",
ID: "id1",
Attributes: map[string]interface{}{
Attributes: map[string]any{
"value": "Value of the markdown",
},
},

@ -545,7 +545,7 @@ body:
{
Type: "markdown",
ID: "id1",
Attributes: map[string]interface{}{
Attributes: map[string]any{
"value": "Value of the markdown",
},
},

@ -15,18 +15,18 @@ import (

// Encoder represents an encoder for json
type Encoder interface {
Encode(v interface{}) error
Encode(v any) error
}

// Decoder represents a decoder for json
type Decoder interface {
Decode(v interface{}) error
Decode(v any) error
}

// Interface represents an interface to handle json data
type Interface interface {
Marshal(v interface{}) ([]byte, error)
Unmarshal(data []byte, v interface{}) error
Marshal(v any) ([]byte, error)
Unmarshal(data []byte, v any) error
NewEncoder(writer io.Writer) Encoder
NewDecoder(reader io.Reader) Decoder
Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error

@ -44,12 +44,12 @@ var (
type StdJSON struct{}

// Marshal implements Interface
func (StdJSON) Marshal(v interface{}) ([]byte, error) {
func (StdJSON) Marshal(v any) ([]byte, error) {
return json.Marshal(v)
}

// Unmarshal implements Interface
func (StdJSON) Unmarshal(data []byte, v interface{}) error {
func (StdJSON) Unmarshal(data []byte, v any) error {
return json.Unmarshal(data, v)
}

@ -74,12 +74,12 @@ type JSONiter struct {
}

// Marshal implements Interface
func (j JSONiter) Marshal(v interface{}) ([]byte, error) {
func (j JSONiter) Marshal(v any) ([]byte, error) {
return j.API.Marshal(v)
}

// Unmarshal implements Interface
func (j JSONiter) Unmarshal(data []byte, v interface{}) error {
func (j JSONiter) Unmarshal(data []byte, v any) error {
return j.API.Unmarshal(data, v)
}

@ -99,12 +99,12 @@ func (j JSONiter) Indent(dst *bytes.Buffer, src []byte, prefix, indent string) e
}

// Marshal converts object as bytes
func Marshal(v interface{}) ([]byte, error) {
func Marshal(v any) ([]byte, error) {
return DefaultJSONHandler.Marshal(v)
}

// Unmarshal decodes object from bytes
func Unmarshal(data []byte, v interface{}) error {
func Unmarshal(data []byte, v any) error {
return DefaultJSONHandler.Unmarshal(data, v)
}

@ -124,7 +124,7 @@ func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
}

// MarshalIndent copied from encoding/json
func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
func MarshalIndent(v any, prefix, indent string) ([]byte, error) {
b, err := Marshal(v)
if err != nil {
return nil, err

@ -144,7 +144,7 @@ func Valid(data []byte) bool {

// UnmarshalHandleDoubleEncode - due to a bug in xorm (see https://gitea.com/xorm/xorm/pulls/1957) - it's
// possible that a Blob may be double encoded or gain an unwanted prefix of 0xff 0xfe.
func UnmarshalHandleDoubleEncode(bs []byte, v interface{}) error {
func UnmarshalHandleDoubleEncode(bs []byte, v any) error {
err := json.Unmarshal(bs, v)
if err != nil {
ok := true

@ -120,7 +120,7 @@ func TestRender_IssueIndexPattern2(t *testing.T) {
isExternal = true
}

links := make([]interface{}, len(indices))
links := make([]any, len(indices))
for i, index := range indices {
links[i] = numericIssueLink(util.URLJoin(TestRepoURL, path), "ref-issue", index, marker)
}

@ -204,7 +204,7 @@ func TestRender_IssueIndexPattern4(t *testing.T) {

// alphanumeric: render inputs with valid mentions
test := func(s, expectedFmt string, names ...string) {
links := make([]interface{}, len(names))
links := make([]any, len(names))
for i, name := range names {
links[i] = externalIssueLink("https://someurl.com/someUser/someRepo/", "ref-issue ref-external-issue", name)
}

@ -226,7 +226,7 @@ func TestRender_IssueIndexPattern5(t *testing.T) {
test := func(s, expectedFmt, pattern string, ids, names []string) {
metas := regexpMetas
metas["regexp"] = pattern
links := make([]interface{}, len(ids))
links := make([]any, len(ids))
for i, id := range ids {
links[i] = link(util.URLJoin("https://someurl.com/someUser/someRepo/", id), "ref-issue ref-external-issue", names[i])
}

@ -55,14 +55,14 @@ func isYAMLSeparator(line []byte) bool {

// ExtractMetadata consumes a markdown file, parses YAML frontmatter,
// and returns the frontmatter metadata separated from the markdown content
func ExtractMetadata(contents string, out interface{}) (string, error) {
func ExtractMetadata(contents string, out any) (string, error) {
body, err := ExtractMetadataBytes([]byte(contents), out)
return string(body), err
}

// ExtractMetadata consumes a markdown file, parses YAML frontmatter,
// and returns the frontmatter metadata separated from the markdown content
func ExtractMetadataBytes(contents []byte, out interface{}) ([]byte, error) {
func ExtractMetadataBytes(contents []byte, out any) ([]byte, error) {
var front, body []byte

start, end := 0, len(contents)

@ -24,7 +24,7 @@ type Comment struct {
Updated time.Time
Content string
Reactions []*Reaction
Meta map[string]interface{} `yaml:"meta,omitempty"` // see models/issues/comment.go for fields in Comment struct
Meta map[string]any `yaml:"meta,omitempty"` // see models/issues/comment.go for fields in Comment struct
}

// GetExternalName ExternalUserMigrated interface

@ -34,4 +34,4 @@ type DownloaderFactory interface {
}

// DownloaderContext has opaque information only relevant to a given downloader
type DownloaderContext interface{}
type DownloaderContext any

@ -17,7 +17,7 @@ import (
)

// Load project data from file, with optional validation
func Load(filename string, data interface{}, validation bool) error {
func Load(filename string, data any, validation bool) error {
isJSON := strings.HasSuffix(filename, ".json")

bs, err := os.ReadFile(filename)

@ -34,7 +34,7 @@ func Load(filename string, data interface{}, validation bool) error {
return unmarshal(bs, data, isJSON)
}

func unmarshal(bs []byte, data interface{}, isJSON bool) error {
func unmarshal(bs []byte, data any, isJSON bool) error {
if isJSON {
return json.Unmarshal(bs, data)
}

@ -47,8 +47,8 @@ func getSchema(filename string) (*jsonschema.Schema, error) {
return c.Compile(filename)
}

func validate(bs []byte, datatype interface{}, isJSON bool) error {
var v interface{}
func validate(bs []byte, datatype any, isJSON bool) error {
var v any
err := unmarshal(bs, &v, isJSON)
if err != nil {
return err

@ -81,11 +81,11 @@ func validate(bs []byte, datatype interface{}, isJSON bool) error {
return err
}

func toStringKeys(val interface{}) (interface{}, error) {
func toStringKeys(val any) (any, error) {
var err error
switch val := val.(type) {
case map[string]interface{}:
m := make(map[string]interface{})
case map[string]any:
m := make(map[string]any)
for k, v := range val {
m[k], err = toStringKeys(v)
if err != nil {

@ -93,8 +93,8 @@ func toStringKeys(val interface{}) (interface{}, error) {
}
}
return m, nil
case []interface{}:
l := make([]interface{}, len(val))
case []any:
l := make([]any, len(val))
for i, v := range val {
l[i], err = toStringKeys(v)
if err != nil {

@ -4,7 +4,7 @@
package migration

// Messenger is a formatting function similar to i18n.Tr
type Messenger func(key string, args ...interface{})
type Messenger func(key string, args ...any)

// NilMessenger represents an empty formatting function
func NilMessenger(string, ...interface{}) {}
func NilMessenger(string, ...any) {}

@ -54,7 +54,7 @@ func (m *Manager) GetLevelDB(connection string) (db *leveldb.DB, err error) {
// Because we want associate any goroutines created by this call to the main nosqldb context we need to
// wrap this in a goroutine labelled with the nosqldb context
done := make(chan struct{})
var recovered interface{}
var recovered any
go func() {
defer func() {
recovered = recover()

@ -47,7 +47,7 @@ func (m *Manager) GetRedisClient(connection string) (client redis.UniversalClien
// Because we want associate any goroutines created by this call to the main nosqldb context we need to
// wrap this in a goroutine labelled with the nosqldb context
done := make(chan struct{})
var recovered interface{}
var recovered any
go func() {
defer func() {
recovered = recover()

@ -38,18 +38,18 @@ type Package struct {

// Metadata represents the metadata of a Composer package
type Metadata struct {
Description string `json:"description,omitempty"`
Keywords []string `json:"keywords,omitempty"`
Homepage string `json:"homepage,omitempty"`
License Licenses `json:"license,omitempty"`
Authors []Author `json:"authors,omitempty"`
Autoload map[string]interface{} `json:"autoload,omitempty"`
AutoloadDev map[string]interface{} `json:"autoload-dev,omitempty"`
Extra map[string]interface{} `json:"extra,omitempty"`
Require map[string]string `json:"require,omitempty"`
RequireDev map[string]string `json:"require-dev,omitempty"`
Suggest map[string]string `json:"suggest,omitempty"`
Provide map[string]string `json:"provide,omitempty"`
Description string `json:"description,omitempty"`
Keywords []string `json:"keywords,omitempty"`
Homepage string `json:"homepage,omitempty"`
License Licenses `json:"license,omitempty"`
Authors []Author `json:"authors,omitempty"`
Autoload map[string]any `json:"autoload,omitempty"`
AutoloadDev map[string]any `json:"autoload-dev,omitempty"`
Extra map[string]any `json:"extra,omitempty"`
Require map[string]string `json:"require,omitempty"`
RequireDev map[string]string `json:"require-dev,omitempty"`
Suggest map[string]string `json:"suggest,omitempty"`
Provide map[string]string `json:"provide,omitempty"`
}

// Licenses represents the licenses of a Composer package

@ -55,14 +55,14 @@ type Maintainer struct {
}

type Dependency struct {
Name string `json:"name" yaml:"name"`
Version string `json:"version,omitempty" yaml:"version,omitempty"`
Repository string `json:"repository" yaml:"repository"`
Condition string `json:"condition,omitempty" yaml:"condition,omitempty"`
Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"`
Enabled bool `json:"enabled,omitempty" yaml:"enabled,omitempty"`
ImportValues []interface{} `json:"import_values,omitempty" yaml:"import-values,omitempty"`
Alias string `json:"alias,omitempty" yaml:"alias,omitempty"`
Name string `json:"name" yaml:"name"`
Version string `json:"version,omitempty" yaml:"version,omitempty"`
Repository string `json:"repository" yaml:"repository"`
Condition string `json:"condition,omitempty" yaml:"condition,omitempty"`
Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"`
Enabled bool `json:"enabled,omitempty" yaml:"enabled,omitempty"`
ImportValues []any `json:"import_values,omitempty" yaml:"import-values,omitempty"`
Alias string `json:"alias,omitempty" yaml:"alias,omitempty"`
}

// ParseChartArchive parses the metadata of a Helm archive

@ -38,12 +38,12 @@ type Package struct {

// Metadata represents the metadata of a Pub package
type Metadata struct {
Description string `json:"description,omitempty"`
ProjectURL string `json:"project_url,omitempty"`
RepositoryURL string `json:"repository_url,omitempty"`
DocumentationURL string `json:"documentation_url,omitempty"`
Readme string `json:"readme,omitempty"`
Pubspec interface{} `json:"pubspec"`
Description string `json:"description,omitempty"`
ProjectURL string `json:"project_url,omitempty"`
RepositoryURL string `json:"repository_url,omitempty"`
DocumentationURL string `json:"documentation_url,omitempty"`
Readme string `json:"readme,omitempty"`
Pubspec any `json:"pubspec"`
}

type pubspecPackage struct {

@ -134,7 +134,7 @@ func ParsePubspecMetadata(r io.Reader) (*Package, error) {
p.Repository = ""
}

var pubspec interface{}
var pubspec any
if err := yaml.Unmarshal(buf, &pubspec); err != nil {
return nil, err
}

@ -40,19 +40,19 @@ var (
// RubyUserMarshal is a Ruby object that has a marshal_load function.
type RubyUserMarshal struct {
Name string
Value interface{}
Value any
}

// RubyUserDef is a Ruby object that has a _load function.
type RubyUserDef struct {
Name string
Value interface{}
Value any
}

// RubyObject is a default Ruby object.
type RubyObject struct {
Name string
Member map[string]interface{}
Member map[string]any
}

// MarshalEncoder mimics Rubys Marshal class.

@ -71,7 +71,7 @@ func NewMarshalEncoder(w io.Writer) *MarshalEncoder {
}

// Encode encodes the given type
func (e *MarshalEncoder) Encode(v interface{}) error {
func (e *MarshalEncoder) Encode(v any) error {
if _, err := e.w.Write([]byte{majorVersion, minorVersion}); err != nil {
return err
}

@ -83,7 +83,7 @@ func (e *MarshalEncoder) Encode(v interface{}) error {
return e.w.Flush()
}

func (e *MarshalEncoder) marshal(v interface{}) error {
func (e *MarshalEncoder) marshal(v any) error {
if v == nil {
return e.marshalNil()
}

@ -12,7 +12,7 @@ import (

func TestMinimalEncoder(t *testing.T) {
cases := []struct {
Value interface{}
Value any
Expected []byte
Error error
}{

@ -73,7 +73,7 @@ func TestMinimalEncoder(t *testing.T) {
{
Value: &RubyObject{
Name: "Test",
Member: map[string]interface{}{
Member: map[string]any{
"test": 4,
},
},

@ -65,12 +65,12 @@ type gemspec struct {
Version struct {
Version string `yaml:"version"`
} `yaml:"version"`
Platform string `yaml:"platform"`
Authors []string `yaml:"authors"`
Autorequire interface{} `yaml:"autorequire"`
Bindir string `yaml:"bindir"`
CertChain []interface{} `yaml:"cert_chain"`
Date string `yaml:"date"`
Platform string `yaml:"platform"`
Authors []string `yaml:"authors"`
Autorequire any `yaml:"autorequire"`
Bindir string `yaml:"bindir"`
CertChain []any `yaml:"cert_chain"`
Date string `yaml:"date"`
Dependencies []struct {
Name string `yaml:"name"`
Requirement requirement `yaml:"requirement"`

@ -78,34 +78,34 @@ type gemspec struct {
Prerelease bool `yaml:"prerelease"`
VersionRequirements requirement `yaml:"version_requirements"`
} `yaml:"dependencies"`
Description string `yaml:"description"`
Executables []string `yaml:"executables"`
Extensions []interface{} `yaml:"extensions"`
ExtraRdocFiles []string `yaml:"extra_rdoc_files"`
Files []string `yaml:"files"`
Homepage string `yaml:"homepage"`
Licenses []string `yaml:"licenses"`
Description string `yaml:"description"`
Executables []string `yaml:"executables"`
Extensions []any `yaml:"extensions"`
ExtraRdocFiles []string `yaml:"extra_rdoc_files"`
Files []string `yaml:"files"`
Homepage string `yaml:"homepage"`
Licenses []string `yaml:"licenses"`
Metadata struct {
BugTrackerURI string `yaml:"bug_tracker_uri"`
ChangelogURI string `yaml:"changelog_uri"`
DocumentationURI string `yaml:"documentation_uri"`
SourceCodeURI string `yaml:"source_code_uri"`
} `yaml:"metadata"`
PostInstallMessage interface{} `yaml:"post_install_message"`
RdocOptions []interface{} `yaml:"rdoc_options"`
RequirePaths []string `yaml:"require_paths"`
RequiredRubyVersion requirement `yaml:"required_ruby_version"`
RequiredRubygemsVersion requirement `yaml:"required_rubygems_version"`
Requirements []interface{} `yaml:"requirements"`
RubygemsVersion string `yaml:"rubygems_version"`
SigningKey interface{} `yaml:"signing_key"`
SpecificationVersion int `yaml:"specification_version"`
Summary string `yaml:"summary"`
TestFiles []interface{} `yaml:"test_files"`
PostInstallMessage any `yaml:"post_install_message"`
RdocOptions []any `yaml:"rdoc_options"`
RequirePaths []string `yaml:"require_paths"`
RequiredRubyVersion requirement `yaml:"required_ruby_version"`
RequiredRubygemsVersion requirement `yaml:"required_rubygems_version"`
Requirements []any `yaml:"requirements"`
RubygemsVersion string `yaml:"rubygems_version"`
SigningKey any `yaml:"signing_key"`
SpecificationVersion int `yaml:"specification_version"`
Summary string `yaml:"summary"`
TestFiles []any `yaml:"test_files"`
}

type requirement struct {
Requirements [][]interface{} `yaml:"requirements"`
Requirements [][]any `yaml:"requirements"`
}

// AsVersionRequirement converts into []VersionRequirement

@ -119,7 +119,7 @@ func (r requirement) AsVersionRequirement() []VersionRequirement {
if !ok {
continue
}
vm, ok := req[1].(map[string]interface{})
vm, ok := req[1].(map[string]any)
if !ok {
continue
}

@ -85,11 +85,11 @@ type LoggerOptions struct {
Logger string
Writer string
Mode string
Config map[string]interface{}
Config map[string]any
}

// AddLogger adds a logger
func AddLogger(ctx context.Context, logger, writer, mode string, config map[string]interface{}) ResponseExtra {
func AddLogger(ctx context.Context, logger, writer, mode string, config map[string]any) ResponseExtra {
reqURL := setting.LocalURL + "api/internal/manager/add-logger"
req := newInternalRequest(ctx, reqURL, "POST", LoggerOptions{
Logger: logger,

@ -24,7 +24,7 @@ func (c *Context) GetParent() *Context {
}

// Value is part of the interface for context.Context. We mostly defer to the internal context - but we return this in response to the ProcessContextKey
func (c *Context) Value(key interface{}) interface{} {
func (c *Context) Value(key any) any {
if key == ProcessContextKey {
return c
}

@ -32,7 +32,7 @@ func (c *Context) Value(key interface{}) interface{} {
}

// ProcessContextKey is the key under which process contexts are stored
var ProcessContextKey interface{} = "process-context"
var ProcessContextKey any = "process-context"

// GetContext will return a process context if one exists
func GetContext(ctx context.Context) *Context {

@ -17,11 +17,11 @@ import (
type DBStore struct {
sid string
lock sync.RWMutex
data map[interface{}]interface{}
data map[any]any
}

// NewDBStore creates and returns a DB session store.
func NewDBStore(sid string, kv map[interface{}]interface{}) *DBStore {
func NewDBStore(sid string, kv map[any]any) *DBStore {
return &DBStore{
sid: sid,
data: kv,

@ -29,7 +29,7 @@ func NewDBStore(sid string, kv map[interface{}]interface{}) *DBStore {
}

// Set sets value to given key in session.
func (s *DBStore) Set(key, val interface{}) error {
func (s *DBStore) Set(key, val any) error {
s.lock.Lock()
defer s.lock.Unlock()

@ -38,7 +38,7 @@ func (s *DBStore) Set(key, val interface{}) error {
}

// Get gets value by given key in session.
func (s *DBStore) Get(key interface{}) interface{} {
func (s *DBStore) Get(key any) any {
s.lock.RLock()
defer s.lock.RUnlock()

@ -46,7 +46,7 @@ func (s *DBStore) Get(key interface{}) interface{} {
}

// Delete delete a key from session.
func (s *DBStore) Delete(key interface{}) error {
func (s *DBStore) Delete(key any) error {
s.lock.Lock()
defer s.lock.Unlock()

@ -79,7 +79,7 @@ func (s *DBStore) Flush() error {
s.lock.Lock()
defer s.lock.Unlock()

s.data = make(map[interface{}]interface{})
s.data = make(map[any]any)
return nil
}

@ -102,9 +102,9 @@ func (p *DBProvider) Read(sid string) (session.RawStore, error) {
return nil, err
}

var kv map[interface{}]interface{}
var kv map[any]any
if len(s.Data) == 0 || s.Expiry.Add(p.maxLifetime) <= timeutil.TimeStampNow() {
kv = make(map[interface{}]interface{})
kv = make(map[any]any)
} else {
kv, err = session.DecodeGob(s.Data)
if err != nil {

@ -136,9 +136,9 @@ func (p *DBProvider) Regenerate(oldsid, sid string) (_ session.RawStore, err err
return nil, err
}

var kv map[interface{}]interface{}
var kv map[any]any
if len(s.Data) == 0 || s.Expiry.Add(p.maxLifetime) <= timeutil.TimeStampNow() {
kv = make(map[interface{}]interface{})
kv = make(map[any]any)
} else {
kv, err = session.DecodeGob(s.Data)
if err != nil {

@ -35,11 +35,11 @@ type RedisStore struct {
prefix, sid string
duration time.Duration
lock sync.RWMutex
data map[interface{}]interface{}
data map[any]any
}

// NewRedisStore creates and returns a redis session store.
func NewRedisStore(c redis.UniversalClient, prefix, sid string, dur time.Duration, kv map[interface{}]interface{}) *RedisStore {
func NewRedisStore(c redis.UniversalClient, prefix, sid string, dur time.Duration, kv map[any]any) *RedisStore {
return &RedisStore{
c: c,
prefix: prefix,

@ -50,7 +50,7 @@ func NewRedisStore(c redis.UniversalClient, prefix, sid string, dur time.Duratio
}

// Set sets value to given key in session.
func (s *RedisStore) Set(key, val interface{}) error {
func (s *RedisStore) Set(key, val any) error {
s.lock.Lock()
defer s.lock.Unlock()

@ -59,7 +59,7 @@ func (s *RedisStore) Set(key, val interface{}) error {
}

// Get gets value by given key in session.
func (s *RedisStore) Get(key interface{}) interface{} {
func (s *RedisStore) Get(key any) any {
s.lock.RLock()
defer s.lock.RUnlock()

@ -67,7 +67,7 @@ func (s *RedisStore) Get(key interface{}) interface{} {
}

// Delete delete a key from session.
func (s *RedisStore) Delete(key interface{}) error {
func (s *RedisStore) Delete(key any) error {
s.lock.Lock()
defer s.lock.Unlock()

@ -100,7 +100,7 @@ func (s *RedisStore) Flush() error {
s.lock.Lock()
defer s.lock.Unlock()

s.data = make(map[interface{}]interface{})
s.data = make(map[any]any)
return nil
}

@ -141,13 +141,13 @@ func (p *RedisProvider) Read(sid string) (session.RawStore, error) {
}
}

var kv map[interface{}]interface{}
var kv map[any]any
kvs, err := p.c.Get(graceful.GetManager().HammerContext(), psid).Result()
if err != nil {
return nil, err
}
if len(kvs) == 0 {
kv = make(map[interface{}]interface{})
kv = make(map[any]any)
} else {
kv, err = session.DecodeGob([]byte(kvs))
if err != nil {

@ -197,9 +197,9 @@ func (p *RedisProvider) Regenerate(oldsid, sid string) (_ session.RawStore, err
return nil, err
}

var kv map[interface{}]interface{}
var kv map[any]any
if len(kvs) == 0 {
kv = make(map[interface{}]interface{})
kv = make(map[any]any)
} else {
kv, err = session.DecodeGob([]byte(kvs))
if err != nil {

@ -11,9 +11,9 @@ import (

// Store represents a session store
type Store interface {
Get(interface{}) interface{}
Set(interface{}, interface{}) error
Delete(interface{}) error
Get(any) any
Set(any, any) error
Delete(any) error
}

// RegenerateSession regenerates the underlying session and returns the new store

@ -62,7 +62,7 @@ func (o *VirtualSessionProvider) Read(sid string) (session.RawStore, error) {
if o.provider.Exist(sid) {
return o.provider.Read(sid)
}
kv := make(map[interface{}]interface{})
kv := make(map[any]any)
kv["_old_uid"] = "0"
return NewVirtualStore(o, sid, kv), nil
}

@ -107,12 +107,12 @@ type VirtualStore struct {
p *VirtualSessionProvider
sid string
lock sync.RWMutex
data map[interface{}]interface{}
data map[any]any
released bool
}

// NewVirtualStore creates and returns a virtual session store.
func NewVirtualStore(p *VirtualSessionProvider, sid string, kv map[interface{}]interface{}) *VirtualStore {
func NewVirtualStore(p *VirtualSessionProvider, sid string, kv map[any]any) *VirtualStore {
return &VirtualStore{
p: p,
sid: sid,

@ -121,7 +121,7 @@ func NewVirtualStore(p *VirtualSessionProvider, sid string, kv map[interface{}]i
}

// Set sets value to given key in session.
func (s *VirtualStore) Set(key, val interface{}) error {
func (s *VirtualStore) Set(key, val any) error {
s.lock.Lock()
defer s.lock.Unlock()

@ -130,7 +130,7 @@ func (s *VirtualStore) Set(key, val interface{}) error {
}

// Get gets value by given key in session.
func (s *VirtualStore) Get(key interface{}) interface{} {
func (s *VirtualStore) Get(key any) any {
s.lock.RLock()
defer s.lock.RUnlock()

@ -138,7 +138,7 @@ func (s *VirtualStore) Get(key interface{}) interface{} {
}

// Delete delete a key from session.
func (s *VirtualStore) Delete(key interface{}) error {
func (s *VirtualStore) Delete(key any) error {
s.lock.Lock()
defer s.lock.Unlock()

@ -192,6 +192,6 @@ func (s *VirtualStore) Flush() error {
s.lock.Lock()
defer s.lock.Unlock()

s.data = make(map[interface{}]interface{})
s.data = make(map[any]any)
return nil
}

@ -6,11 +6,11 @@ package setting
import "reflect"

// GetCronSettings maps the cron subsection to the provided config
func GetCronSettings(name string, config interface{}) (interface{}, error) {
func GetCronSettings(name string, config any) (any, error) {
return getCronSettings(CfgProvider, name, config)
}

func getCronSettings(rootCfg ConfigProvider, name string, config interface{}) (interface{}, error) {
func getCronSettings(rootCfg ConfigProvider, name string, config any) (any, error) {
if err := rootCfg.Section("cron." + name).MapTo(config); err != nil {
return config, err
}

@ -30,7 +30,7 @@ func initLoggersByConfig(t *testing.T, config string) (*log.LoggerManager, func(
return manager, manager.Close
}

func toJSON(v interface{}) string {
func toJSON(v any) string {
b, _ := json.MarshalIndent(v, "", "\t")
return string(b)
}

@ -173,7 +173,7 @@ func (m minioFileInfo) Mode() os.FileMode {
return os.ModePerm
}

func (m minioFileInfo) Sys() interface{} {
func (m minioFileInfo) Sys() any {
return nil
}

@ -20,7 +20,7 @@ var ErrURLNotSupported = errors.New("url method not supported")

// ErrInvalidConfiguration is called when there is invalid configuration for a storage
type ErrInvalidConfiguration struct {
cfg interface{}
cfg any
err error
}

@ -140,10 +140,10 @@ const (
// IssueFormField represents a form field
// swagger:model
type IssueFormField struct {
Type IssueFormFieldType `json:"type" yaml:"type"`
ID string `json:"id" yaml:"id"`
Attributes map[string]interface{} `json:"attributes" yaml:"attributes"`
Validations map[string]interface{} `json:"validations" yaml:"validations"`
Type IssueFormFieldType `json:"type" yaml:"type"`
ID string `json:"id" yaml:"id"`
Attributes map[string]any `json:"attributes" yaml:"attributes"`
Validations map[string]any `json:"validations" yaml:"validations"`
}

// IssueTemplate represents an issue template for a repository

@ -166,7 +166,7 @@ type FilesResponse struct {

// FileDeleteResponse contains information about a repo's file that was deleted
type FileDeleteResponse struct {
Content interface{} `json:"content"` // to be set to nil
Content any `json:"content"` // to be set to nil
Commit *FileCommitResponse `json:"commit"`
Verification *PayloadCommitVerification `json:"verification"`
}

@ -9,10 +9,10 @@ import (

// WatchInfo represents an API watch status of one repository
type WatchInfo struct {
Subscribed bool `json:"subscribed"`
Ignored bool `json:"ignored"`
Reason interface{} `json:"reason"`
CreatedAt time.Time `json:"created_at"`
URL string `json:"url"`
RepositoryURL string `json:"repository_url"`
Subscribed bool `json:"subscribed"`
Ignored bool `json:"ignored"`
Reason any `json:"reason"`
CreatedAt time.Time `json:"created_at"`
URL string `json:"url"`
RepositoryURL string `json:"repository_url"`
}