
Replace interface{} with any (#25686) (#25687)

Same perl replacement as https://github.com/go-gitea/gitea/pull/25686
but for 1.20 to ease future backporting.
Author: silverwind, 2023-07-05 05:41:32 +02:00 (committed by GitHub)
Parent: 4e310133f9
Commit: 24e64fe372
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
233 changed files with 729 additions and 729 deletions
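
A note on why this substitution is safe (not part of the commit): since Go 1.18 the predeclared identifier any is an alias for interface{} (declared as type any = interface{}), so the replacement is purely textual and cannot change behavior. A minimal, self-contained sketch; the describe helper below is hypothetical and only illustrates the equivalence:

package main

import "fmt"

// "any" and "interface{}" name the same type, so these two signatures are
// interchangeable: func describe(v any) string == func describe(v interface{}) string.
func describe(v any) string {
	return fmt.Sprintf("%T: %v", v, v)
}

func main() {
	var a interface{} = 42
	var b any = a // identical types, assignable in both directions
	fmt.Println(describe(a), describe(b), describe("some text"))
}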

View file

@@ -25,7 +25,7 @@ import (
 var optionLogVerbose bool
-func logVerbose(msg string, args ...interface{}) {
+func logVerbose(msg string, args ...any) {
 if optionLogVerbose {
 log.Printf(msg, args...)
 }

View file

@@ -63,7 +63,7 @@ Outputs to 'cert.pem' and 'key.pem' and will overwrite existing files.`,
 },
 }
-func publicKey(priv interface{}) interface{} {
+func publicKey(priv any) any {
 switch k := priv.(type) {
 case *rsa.PrivateKey:
 return &k.PublicKey
@@ -74,7 +74,7 @@ func publicKey(priv interface{}) interface{} {
 }
 }
-func pemBlockForKey(priv interface{}) *pem.Block {
+func pemBlockForKey(priv any) *pem.Block {
 switch k := priv.(type) {
 case *rsa.PrivateKey:
 return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(k)}
@@ -94,7 +94,7 @@ func runCert(c *cli.Context) error {
 return err
 }
-var priv interface{}
+var priv any
 var err error
 switch c.String("ecdsa-curve") {
 case "":

View file

@@ -161,7 +161,7 @@ It can be used for backup and capture Gitea server image to send to maintainer`,
 },
 }
-func fatal(format string, args ...interface{}) {
+func fatal(format string, args ...any) {
 fmt.Fprintf(os.Stderr, format+"\n", args...)
 log.Fatal(format, args...)
 }
@@ -236,7 +236,7 @@ func runDump(ctx *cli.Context) error {
 return err
 }
-var iface interface{}
+var iface any
 if fileName == "-" {
 iface, err = archiver.ByExtension(fmt.Sprintf(".%s", outType))
 } else {

View file

@@ -178,7 +178,7 @@ func runAddConnLogger(c *cli.Context) error {
 defer cancel()
 setup(ctx, c.Bool("debug"))
-vals := map[string]interface{}{}
+vals := map[string]any{}
 mode := "conn"
 vals["net"] = "tcp"
 if c.IsSet("protocol") {
@@ -208,7 +208,7 @@ func runAddFileLogger(c *cli.Context) error {
 defer cancel()
 setup(ctx, c.Bool("debug"))
-vals := map[string]interface{}{}
+vals := map[string]any{}
 mode := "file"
 if c.IsSet("filename") {
 vals["filename"] = c.String("filename")
@@ -236,7 +236,7 @@ func runAddFileLogger(c *cli.Context) error {
 return commonAddLogger(c, mode, vals)
 }
-func commonAddLogger(c *cli.Context, mode string, vals map[string]interface{}) error {
+func commonAddLogger(c *cli.Context, mode string, vals map[string]any) error {
 if len(c.String("level")) > 0 {
 vals["level"] = log.LevelFromString(c.String("level")).String()
 }

View file

@@ -95,7 +95,7 @@ var (
 // fail prints message to stdout, it's mainly used for git serv and git hook commands.
 // The output will be passed to git client and shown to user.
-func fail(ctx context.Context, userMessage, logMsgFmt string, args ...interface{}) error {
+func fail(ctx context.Context, userMessage, logMsgFmt string, args ...any) error {
 if userMessage == "" {
 userMessage = "Internal Server Error (no specific error)"
 }

View file

@@ -44,7 +44,7 @@ func init() {
 // TranslatableMessage represents JSON struct that can be translated with a Locale
 type TranslatableMessage struct {
 Format string
-Args []interface{} `json:"omitempty"`
+Args []any `json:"omitempty"`
 }
 // LoadRepo loads repository of the task

View file

@@ -47,7 +47,7 @@ var sshOpLocker sync.Mutex
 // AuthorizedStringForKey creates the authorized keys string appropriate for the provided key
 func AuthorizedStringForKey(key *PublicKey) string {
 sb := &strings.Builder{}
-_ = setting.SSH.AuthorizedKeysCommandTemplateTemplate.Execute(sb, map[string]interface{}{
+_ = setting.SSH.AuthorizedKeysCommandTemplateTemplate.Execute(sb, map[string]any{
 "AppPath": util.ShellEscape(setting.AppPath),
 "AppWorkPath": util.ShellEscape(setting.AppWorkPath),
 "CustomConf": util.ShellEscape(setting.CustomConf),
@@ -175,7 +175,7 @@ func RewriteAllPublicKeys() error {
 // RegeneratePublicKeys regenerates the authorized_keys file
 func RegeneratePublicKeys(ctx context.Context, t io.StringWriter) error {
-if err := db.GetEngine(ctx).Where("type != ?", KeyTypePrincipal).Iterate(new(PublicKey), func(idx int, bean interface{}) (err error) {
+if err := db.GetEngine(ctx).Where("type != ?", KeyTypePrincipal).Iterate(new(PublicKey), func(idx int, bean any) (err error) {
 _, err = t.WriteString((bean.(*PublicKey)).AuthorizedString())
 return err
 }); err != nil {

View file

@@ -97,7 +97,7 @@ func RewriteAllPrincipalKeys(ctx context.Context) error {
 }
 func regeneratePrincipalKeys(ctx context.Context, t io.StringWriter) error {
-if err := db.GetEngine(ctx).Where("type = ?", KeyTypePrincipal).Iterate(new(PublicKey), func(idx int, bean interface{}) (err error) {
+if err := db.GetEngine(ctx).Where("type = ?", KeyTypePrincipal).Iterate(new(PublicKey), func(idx int, bean any) (err error) {
 _, err = t.WriteString((bean.(*PublicKey)).AuthorizedString())
 return err
 }); err != nil {

View file

@@ -52,7 +52,7 @@ func (ctx *Context) Engine() Engine {
 }
 // Value shadows Value for context.Context but allows us to get ourselves and an Engined object
-func (ctx *Context) Value(key interface{}) interface{} {
+func (ctx *Context) Value(key any) any {
 if key == enginedContextKey {
 return ctx
 }
@@ -163,28 +163,28 @@ func txWithNoCheck(parentCtx context.Context, f func(ctx context.Context) error)
 }
 // Insert inserts records into database
-func Insert(ctx context.Context, beans ...interface{}) error {
+func Insert(ctx context.Context, beans ...any) error {
 _, err := GetEngine(ctx).Insert(beans...)
 return err
 }
 // Exec executes a sql with args
-func Exec(ctx context.Context, sqlAndArgs ...interface{}) (sql.Result, error) {
+func Exec(ctx context.Context, sqlAndArgs ...any) (sql.Result, error) {
 return GetEngine(ctx).Exec(sqlAndArgs...)
 }
 // GetByBean filled empty fields of the bean according non-empty fields to query in database.
-func GetByBean(ctx context.Context, bean interface{}) (bool, error) {
+func GetByBean(ctx context.Context, bean any) (bool, error) {
 return GetEngine(ctx).Get(bean)
 }
 // DeleteByBean deletes all records according non-empty fields of the bean as conditions.
-func DeleteByBean(ctx context.Context, bean interface{}) (int64, error) {
+func DeleteByBean(ctx context.Context, bean any) (int64, error) {
 return GetEngine(ctx).Delete(bean)
 }
 // DeleteByID deletes the given bean with the given ID
-func DeleteByID(ctx context.Context, id int64, bean interface{}) (int64, error) {
+func DeleteByID(ctx context.Context, id int64, bean any) (int64, error) {
 return GetEngine(ctx).ID(id).NoAutoTime().Delete(bean)
 }
@@ -203,13 +203,13 @@ func FindIDs(ctx context.Context, tableName, idCol string, cond builder.Cond) ([
 // DecrByIDs decreases the given column for entities of the "bean" type with one of the given ids by one
 // Timestamps of the entities won't be updated
-func DecrByIDs(ctx context.Context, ids []int64, decrCol string, bean interface{}) error {
+func DecrByIDs(ctx context.Context, ids []int64, decrCol string, bean any) error {
 _, err := GetEngine(ctx).Decr(decrCol).In("id", ids).NoAutoCondition().NoAutoTime().Update(bean)
 return err
 }
 // DeleteBeans deletes all given beans, beans must contain delete conditions.
-func DeleteBeans(ctx context.Context, beans ...interface{}) (err error) {
+func DeleteBeans(ctx context.Context, beans ...any) (err error) {
 e := GetEngine(ctx)
 for i := range beans {
 if _, err = e.Delete(beans[i]); err != nil {
@@ -220,7 +220,7 @@ func DeleteBeans(ctx context.Context, beans ...interface{}) (err error) {
 }
 // TruncateBeans deletes all given beans, beans may contain delete conditions.
-func TruncateBeans(ctx context.Context, beans ...interface{}) (err error) {
+func TruncateBeans(ctx context.Context, beans ...any) (err error) {
 e := GetEngine(ctx)
 for i := range beans {
 if _, err = e.Truncate(beans[i]); err != nil {
@@ -231,12 +231,12 @@ func TruncateBeans(ctx context.Context, beans ...interface{}) (err error) {
 }
 // CountByBean counts the number of database records according non-empty fields of the bean as conditions.
-func CountByBean(ctx context.Context, bean interface{}) (int64, error) {
+func CountByBean(ctx context.Context, bean any) (int64, error) {
 return GetEngine(ctx).Count(bean)
 }
 // TableName returns the table name according a bean object
-func TableName(bean interface{}) string {
+func TableName(bean any) string {
 return x.TableName(bean)
 }

View file

@@ -25,7 +25,7 @@ import (
 var (
 x *xorm.Engine
-tables []interface{}
+tables []any
 initFuncs []func() error
 // HasEngine specifies if we have a xorm.Engine
@@ -34,41 +34,41 @@ var (
 // Engine represents a xorm engine or session.
 type Engine interface {
-Table(tableNameOrBean interface{}) *xorm.Session
+Table(tableNameOrBean any) *xorm.Session
-Count(...interface{}) (int64, error)
+Count(...any) (int64, error)
-Decr(column string, arg ...interface{}) *xorm.Session
+Decr(column string, arg ...any) *xorm.Session
-Delete(...interface{}) (int64, error)
+Delete(...any) (int64, error)
-Truncate(...interface{}) (int64, error)
+Truncate(...any) (int64, error)
-Exec(...interface{}) (sql.Result, error)
+Exec(...any) (sql.Result, error)
-Find(interface{}, ...interface{}) error
+Find(any, ...any) error
-Get(beans ...interface{}) (bool, error)
+Get(beans ...any) (bool, error)
-ID(interface{}) *xorm.Session
+ID(any) *xorm.Session
-In(string, ...interface{}) *xorm.Session
+In(string, ...any) *xorm.Session
-Incr(column string, arg ...interface{}) *xorm.Session
+Incr(column string, arg ...any) *xorm.Session
-Insert(...interface{}) (int64, error)
+Insert(...any) (int64, error)
-Iterate(interface{}, xorm.IterFunc) error
+Iterate(any, xorm.IterFunc) error
-Join(joinOperator string, tablename, condition interface{}, args ...interface{}) *xorm.Session
+Join(joinOperator string, tablename, condition any, args ...any) *xorm.Session
-SQL(interface{}, ...interface{}) *xorm.Session
+SQL(any, ...any) *xorm.Session
-Where(interface{}, ...interface{}) *xorm.Session
+Where(any, ...any) *xorm.Session
 Asc(colNames ...string) *xorm.Session
 Desc(colNames ...string) *xorm.Session
 Limit(limit int, start ...int) *xorm.Session
 NoAutoTime() *xorm.Session
-SumInt(bean interface{}, columnName string) (res int64, err error)
+SumInt(bean any, columnName string) (res int64, err error)
-Sync2(...interface{}) error
+Sync2(...any) error
 Select(string) *xorm.Session
-NotIn(string, ...interface{}) *xorm.Session
+NotIn(string, ...any) *xorm.Session
-OrderBy(interface{}, ...interface{}) *xorm.Session
+OrderBy(any, ...any) *xorm.Session
-Exist(...interface{}) (bool, error)
+Exist(...any) (bool, error)
 Distinct(...string) *xorm.Session
-Query(...interface{}) ([]map[string][]byte, error)
+Query(...any) ([]map[string][]byte, error)
 Cols(...string) *xorm.Session
 Context(ctx context.Context) *xorm.Session
 Ping() error
 }
 // TableInfo returns table's information via an object
-func TableInfo(v interface{}) (*schemas.Table, error) {
+func TableInfo(v any) (*schemas.Table, error) {
 return x.TableInfo(v)
 }
@@ -78,7 +78,7 @@ func DumpTables(tables []*schemas.Table, w io.Writer, tp ...schemas.DBType) erro
 }
 // RegisterModel registers model, if initfunc provided, it will be invoked after data model sync
-func RegisterModel(bean interface{}, initFunc ...func() error) {
+func RegisterModel(bean any, initFunc ...func() error) {
 tables = append(tables, bean)
 if len(initFuncs) > 0 && initFunc[0] != nil {
 initFuncs = append(initFuncs, initFunc[0])
@@ -209,14 +209,14 @@ func InitEngineWithMigration(ctx context.Context, migrateFunc func(*xorm.Engine)
 }
 // NamesToBean return a list of beans or an error
-func NamesToBean(names ...string) ([]interface{}, error) {
+func NamesToBean(names ...string) ([]any, error) {
-beans := []interface{}{}
+beans := []any{}
 if len(names) == 0 {
 beans = append(beans, tables...)
 return beans, nil
 }
 // Need to map provided names to beans...
-beanMap := make(map[string]interface{})
+beanMap := make(map[string]any)
 for _, bean := range tables {
 beanMap[strings.ToLower(reflect.Indirect(reflect.ValueOf(bean)).Type().Name())] = bean
@@ -224,7 +224,7 @@ func NamesToBean(names ...string) ([]interface{}, error) {
 beanMap[strings.ToLower(x.TableName(bean, true))] = bean
 }
-gotBean := make(map[interface{}]bool)
+gotBean := make(map[any]bool)
 for _, name := range names {
 bean, ok := beanMap[strings.ToLower(strings.TrimSpace(name))]
 if !ok {
@@ -266,7 +266,7 @@ func DumpDatabase(filePath, dbType string) error {
 }
 // MaxBatchInsertSize returns the table's max batch insert size
-func MaxBatchInsertSize(bean interface{}) int {
+func MaxBatchInsertSize(bean any) int {
 t, err := x.TableInfo(bean)
 if err != nil {
 return 50
@@ -286,7 +286,7 @@ func DeleteAllRecords(tableName string) error {
 }
 // GetMaxID will return max id of the table
-func GetMaxID(beanOrTableName interface{}) (maxID int64, err error) {
+func GetMaxID(beanOrTableName any) (maxID int64, err error) {
 _, err = x.Select("MAX(id)").Table(beanOrTableName).Get(&maxID)
 return maxID, err
 }

View file

@@ -25,7 +25,7 @@ func (err ErrCancelled) Error() string {
 }
 // ErrCancelledf returns an ErrCancelled for the provided format and args
-func ErrCancelledf(format string, args ...interface{}) error {
+func ErrCancelledf(format string, args ...any) error {
 return ErrCancelled{
 fmt.Sprintf(format, args...),
 }

View file

@@ -28,47 +28,47 @@ func NewXORMLogger(showSQL bool) xormlog.Logger {
 const stackLevel = 8
 // Log a message with defined skip and at logging level
-func (l *XORMLogBridge) Log(skip int, level log.Level, format string, v ...interface{}) {
+func (l *XORMLogBridge) Log(skip int, level log.Level, format string, v ...any) {
 l.logger.Log(skip+1, level, format, v...)
 }
 // Debug show debug log
-func (l *XORMLogBridge) Debug(v ...interface{}) {
+func (l *XORMLogBridge) Debug(v ...any) {
 l.Log(stackLevel, log.DEBUG, "%s", fmt.Sprint(v...))
 }
 // Debugf show debug log
-func (l *XORMLogBridge) Debugf(format string, v ...interface{}) {
+func (l *XORMLogBridge) Debugf(format string, v ...any) {
 l.Log(stackLevel, log.DEBUG, format, v...)
 }
 // Error show error log
-func (l *XORMLogBridge) Error(v ...interface{}) {
+func (l *XORMLogBridge) Error(v ...any) {
 l.Log(stackLevel, log.ERROR, "%s", fmt.Sprint(v...))
 }
 // Errorf show error log
-func (l *XORMLogBridge) Errorf(format string, v ...interface{}) {
+func (l *XORMLogBridge) Errorf(format string, v ...any) {
 l.Log(stackLevel, log.ERROR, format, v...)
 }
 // Info show information level log
-func (l *XORMLogBridge) Info(v ...interface{}) {
+func (l *XORMLogBridge) Info(v ...any) {
 l.Log(stackLevel, log.INFO, "%s", fmt.Sprint(v...))
 }
 // Infof show information level log
-func (l *XORMLogBridge) Infof(format string, v ...interface{}) {
+func (l *XORMLogBridge) Infof(format string, v ...any) {
 l.Log(stackLevel, log.INFO, format, v...)
 }
 // Warn show warning log
-func (l *XORMLogBridge) Warn(v ...interface{}) {
+func (l *XORMLogBridge) Warn(v ...any) {
 l.Log(stackLevel, log.WARN, "%s", fmt.Sprint(v...))
 }
 // Warnf show warnning log
-func (l *XORMLogBridge) Warnf(format string, v ...interface{}) {
+func (l *XORMLogBridge) Warnf(format string, v ...any) {
 l.Log(stackLevel, log.WARN, format, v...)
 }

View file

@@ -172,7 +172,7 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, from, to str
 // 3. Update all not merged pull request base branch name
 _, err = sess.Table("pull_request").Where("base_repo_id=? AND base_branch=? AND has_merged=?",
 repo.ID, from, false).
-Update(map[string]interface{}{"base_branch": to})
+Update(map[string]any{"base_branch": to})
 if err != nil {
 return err
 }

View file

@@ -264,7 +264,7 @@ func LFSAutoAssociate(ctx context.Context, metas []*LFSMetaObject, user *user_mo
 sess := db.GetEngine(ctx)
-oids := make([]interface{}, len(metas))
+oids := make([]any, len(metas))
 oidMap := make(map[string]*LFSMetaObject, len(metas))
 for i, meta := range metas {
 oids[i] = meta.Oid

View file

@@ -1131,7 +1131,7 @@ func DeleteComment(ctx context.Context, comment *Comment) error {
 }
 if _, err := e.Table("action").
 Where("comment_id = ?", comment.ID).
-Update(map[string]interface{}{
+Update(map[string]any{
 "is_deleted": true,
 }); err != nil {
 return err
@@ -1156,7 +1156,7 @@ func UpdateCommentsMigrationsByType(tp structs.GitServiceType, originalAuthorID
 }),
 )).
 And("comment.original_author_id = ?", originalAuthorID).
-Update(map[string]interface{}{
+Update(map[string]any{
 "poster_id": posterID,
 "original_author": "",
 "original_author_id": 0,

View file

@@ -714,7 +714,7 @@ func (issue *Issue) Pin(ctx context.Context, user *user_model.User) error {
 _, err = db.GetEngine(ctx).Table("issue").
 Where("id = ?", issue.ID).
-Update(map[string]interface{}{
+Update(map[string]any{
 "pin_order": maxPin + 1,
 })
 if err != nil {
@@ -750,7 +750,7 @@ func (issue *Issue) Unpin(ctx context.Context, user *user_model.User) error {
 _, err = db.GetEngine(ctx).Table("issue").
 Where("id = ?", issue.ID).
-Update(map[string]interface{}{
+Update(map[string]any{
 "pin_order": 0,
 })
 if err != nil {
@@ -822,7 +822,7 @@ func (issue *Issue) MovePin(ctx context.Context, newPosition int) error {
 _, err = db.GetEngine(dbctx).Table("issue").
 Where("id = ?", issue.ID).
-Update(map[string]interface{}{
+Update(map[string]any{
 "pin_order": newPosition,
 })
 if err != nil {

View file

@@ -511,7 +511,7 @@ func UpdateIssueDeadline(issue *Issue, deadlineUnix timeutil.TimeStamp, doer *us
 }
 // DeleteInIssue delete records in beans with external key issue_id = ?
-func DeleteInIssue(ctx context.Context, issueID int64, beans ...interface{}) error {
+func DeleteInIssue(ctx context.Context, issueID int64, beans ...any) error {
 e := db.GetEngine(ctx)
 for _, bean := range beans {
 if _, err := e.In("issue_id", issueID).Delete(bean); err != nil {
@@ -673,7 +673,7 @@ func UpdateIssuesMigrationsByType(gitServiceType api.GitServiceType, originalAut
 _, err := db.GetEngine(db.DefaultContext).Table("issue").
 Where("repo_id IN (SELECT id FROM repository WHERE original_service_type = ?)", gitServiceType).
 And("original_author_id = ?", originalAuthorID).
-Update(map[string]interface{}{
+Update(map[string]any{
 "poster_id": posterID,
 "original_author": "",
 "original_author_id": 0,
@@ -686,7 +686,7 @@ func UpdateReactionsMigrationsByType(gitServiceType api.GitServiceType, original
 _, err := db.GetEngine(db.DefaultContext).Table("reaction").
 Where("original_author_id = ?", originalAuthorID).
 And(migratedIssueCond(gitServiceType)).
-Update(map[string]interface{}{
+Update(map[string]any{
 "user_id": userID,
 "original_author": "",
 "original_author_id": 0,

View file

@@ -1090,7 +1090,7 @@ func UpdateReviewsMigrationsByType(tp structs.GitServiceType, originalAuthorID s
 _, err := db.GetEngine(db.DefaultContext).Table("review").
 Where("original_author_id = ?", originalAuthorID).
 And(migratedIssueCond(tp)).
-Update(map[string]interface{}{
+Update(map[string]any{
 "reviewer_id": posterID,
 "original_author": "",
 "original_author_id": 0,

View file

@@ -27,7 +27,7 @@ import (
 // RecreateTables will recreate the tables for the provided beans using the newly provided bean definition and move all data to that new table
 // WARNING: YOU MUST PROVIDE THE FULL BEAN DEFINITION
-func RecreateTables(beans ...interface{}) func(*xorm.Engine) error {
+func RecreateTables(beans ...any) func(*xorm.Engine) error {
 return func(x *xorm.Engine) error {
 sess := x.NewSession()
 defer sess.Close()
@@ -48,7 +48,7 @@ func RecreateTables(beans ...interface{}) func(*xorm.Engine) error {
 // RecreateTable will recreate the table using the newly provided bean definition and move all data to that new table
 // WARNING: YOU MUST PROVIDE THE FULL BEAN DEFINITION
 // WARNING: YOU MUST COMMIT THE SESSION AT THE END
-func RecreateTable(sess *xorm.Session, bean interface{}) error {
+func RecreateTable(sess *xorm.Session, bean any) error {
 // TODO: This will not work if there are foreign keys
 tableName := sess.Engine().TableName(bean)

View file

@@ -30,7 +30,7 @@ import (
 // Provide models to be sync'd with the database - in particular any models you expect fixtures to be loaded from.
 //
 // fixtures in `models/migrations/fixtures/<TestName>` will be loaded automatically
-func PrepareTestEnv(t *testing.T, skip int, syncModels ...interface{}) (*xorm.Engine, func()) {
+func PrepareTestEnv(t *testing.T, skip int, syncModels ...any) (*xorm.Engine, func()) {
 t.Helper()
 ourSkip := 2
 ourSkip += skip

View file

@@ -59,11 +59,11 @@ func UpdateMigrationServiceTypes(x *xorm.Engine) error {
 }
 type ExternalLoginUser struct {
 ExternalID string `xorm:"pk NOT NULL"`
 UserID int64 `xorm:"INDEX NOT NULL"`
 LoginSourceID int64 `xorm:"pk NOT NULL"`
-RawData map[string]interface{} `xorm:"TEXT JSON"`
+RawData map[string]any `xorm:"TEXT JSON"`
 Provider string `xorm:"index VARCHAR(25)"`
 Email string
 Name string
 FirstName string

View file

@@ -14,7 +14,7 @@ import (
 )
 func UnwrapLDAPSourceCfg(x *xorm.Engine) error {
-jsonUnmarshalHandleDoubleEncode := func(bs []byte, v interface{}) error {
+jsonUnmarshalHandleDoubleEncode := func(bs []byte, v any) error {
 err := json.Unmarshal(bs, v)
 if err != nil {
 ok := true
@@ -54,7 +54,7 @@ func UnwrapLDAPSourceCfg(x *xorm.Engine) error {
 const dldapType = 5
 type WrappedSource struct {
-Source map[string]interface{}
+Source map[string]any
 }
 // change lower_email as unique
@@ -77,7 +77,7 @@ func UnwrapLDAPSourceCfg(x *xorm.Engine) error {
 for _, source := range sources {
 wrapped := &WrappedSource{
-Source: map[string]interface{}{},
+Source: map[string]any{},
 }
 err := jsonUnmarshalHandleDoubleEncode([]byte(source.Cfg), &wrapped)
 if err != nil {

View file

@@ -62,8 +62,8 @@ func Test_UnwrapLDAPSourceCfg(t *testing.T) {
 }
 for _, source := range sources {
-converted := map[string]interface{}{}
+converted := map[string]any{}
-expected := map[string]interface{}{}
+expected := map[string]any{}
 if err := json.Unmarshal([]byte(source.Cfg), &converted); err != nil {
 assert.NoError(t, err)

View file

@@ -79,7 +79,7 @@ func Test_AddHeaderAuthorizationEncryptedColWebhook(t *testing.T) {
 return
 }
 for _, h := range hookTasks {
-var m map[string]interface{}
+var m map[string]any
 err := json.Unmarshal([]byte(h.PayloadContent), &m)
 assert.NoError(t, err)
 assert.Nil(t, m["access_token"])

View file

@@ -81,11 +81,11 @@ func AddIssueDependencies(x *xorm.Engine) (err error) {
 // RepoUnit describes all units of a repository
 type RepoUnit struct {
 ID int64
 RepoID int64 `xorm:"INDEX(s)"`
 Type int `xorm:"INDEX(s)"`
-Config map[string]interface{} `xorm:"JSON"`
+Config map[string]any `xorm:"JSON"`
 CreatedUnix int64 `xorm:"INDEX CREATED"`
 Created time.Time `xorm:"-"`
 }
 // Updating existing issue units
@@ -96,7 +96,7 @@ func AddIssueDependencies(x *xorm.Engine) (err error) {
 }
 for _, unit := range units {
 if unit.Config == nil {
-unit.Config = make(map[string]interface{})
+unit.Config = make(map[string]any)
 }
 if _, ok := unit.Config["EnableDependencies"]; !ok {
 unit.Config["EnableDependencies"] = setting.Service.DefaultEnableDependencies

View file

@@ -15,10 +15,10 @@ func AddPullRequestRebaseWithMerge(x *xorm.Engine) error {
 // RepoUnit describes all units of a repository
 type RepoUnit struct {
 ID int64
 RepoID int64 `xorm:"INDEX(s)"`
 Type int `xorm:"INDEX(s)"`
-Config map[string]interface{} `xorm:"JSON"`
+Config map[string]any `xorm:"JSON"`
 CreatedUnix timeutil.TimeStamp `xorm:"INDEX CREATED"`
 }
 const (
@@ -46,7 +46,7 @@ func AddPullRequestRebaseWithMerge(x *xorm.Engine) error {
 }
 for _, unit := range units {
 if unit.Config == nil {
-unit.Config = make(map[string]interface{})
+unit.Config = make(map[string]any)
 }
 // Allow the new merge style if all other merge styles are allowed
 allowMergeRebase := true

View file

@@ -59,7 +59,7 @@ type PackageDescriptor struct {
 Creator *user_model.User
 PackageProperties PackagePropertyList
 VersionProperties PackagePropertyList
-Metadata interface{}
+Metadata any
 Files []*PackageFileDescriptor
 }
@@ -136,7 +136,7 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc
 return nil, err
 }
-var metadata interface{}
+var metadata any
 switch p.Type {
 case TypeAlpine:
 metadata = &alpine.VersionMetadata{}

View file

@@ -456,7 +456,7 @@ func repoStatsCorrectNumClosedPulls(ctx context.Context, id int64) error {
 return repo_model.UpdateRepoIssueNumbers(ctx, id, true, true)
 }
-func statsQuery(args ...interface{}) func(context.Context) ([]map[string][]byte, error) {
+func statsQuery(args ...any) func(context.Context) ([]map[string][]byte, error) {
 return func(ctx context.Context) ([]map[string][]byte, error) {
 return db.GetEngine(ctx).Query(args...)
 }

View file

@@ -105,7 +105,7 @@ func DeleteMirrorByRepoID(repoID int64) error {
 }
 // MirrorsIterate iterates all mirror repositories.
-func MirrorsIterate(limit int, f func(idx int, bean interface{}) error) error {
+func MirrorsIterate(limit int, f func(idx int, bean any) error) error {
 sess := db.GetEngine(db.DefaultContext).
 Where("next_update_unix<=?", time.Now().Unix()).
 And("next_update_unix!=0").

View file

@@ -127,7 +127,7 @@ func GetPushMirrorsSyncedOnCommit(ctx context.Context, repoID int64) ([]*PushMir
 }
 // PushMirrorsIterate iterates all push-mirror repositories.
-func PushMirrorsIterate(ctx context.Context, limit int, f func(idx int, bean interface{}) error) error {
+func PushMirrorsIterate(ctx context.Context, limit int, f func(idx int, bean any) error) error {
 sess := db.GetEngine(ctx).
 Where("last_update + (`interval` / ?) <= ?", time.Second, time.Now().Unix()).
 And("`interval` != 0").

View file

@@ -41,7 +41,7 @@ func TestPushMirrorsIterate(t *testing.T) {
 time.Sleep(1 * time.Millisecond)
-repo_model.PushMirrorsIterate(db.DefaultContext, 1, func(idx int, bean interface{}) error {
+repo_model.PushMirrorsIterate(db.DefaultContext, 1, func(idx int, bean any) error {
 m, ok := bean.(*repo_model.PushMirror)
 assert.True(t, ok)
 assert.Equal(t, "test-1", m.RemoteName)

View file

@@ -442,7 +442,7 @@ func UpdateReleasesMigrationsByType(gitServiceType structs.GitServiceType, origi
 _, err := db.GetEngine(db.DefaultContext).Table("release").
 Where("repo_id IN (SELECT id FROM repository WHERE original_service_type = ?)", gitServiceType).
 And("original_author_id = ?", originalAuthorID).
-Update(map[string]interface{}{
+Update(map[string]any{
 "publisher_id": posterID,
 "original_author": "",
 "original_author_id": 0,

View file

@@ -560,7 +560,7 @@ func searchRepositoryByCondition(ctx context.Context, opts *SearchRepoOptions, c
 opts.OrderBy = db.SearchOrderByAlphabetically
 }
-args := make([]interface{}, 0)
+args := make([]any, 0)
 if opts.PriorityOwnerID > 0 {
 opts.OrderBy = db.SearchOrderBy(fmt.Sprintf("CASE WHEN owner_id = ? THEN 0 ELSE owner_id END, %s", opts.OrderBy))
 args = append(args, opts.PriorityOwnerID)

View file

@@ -43,7 +43,7 @@ func (n *Notice) TrStr() string {
 }
 // CreateNotice creates new system notice.
-func CreateNotice(ctx context.Context, tp NoticeType, desc string, args ...interface{}) error {
+func CreateNotice(ctx context.Context, tp NoticeType, desc string, args ...any) error {
 if len(args) > 0 {
 desc = fmt.Sprintf(desc, args...)
 }
@@ -55,7 +55,7 @@ func CreateNotice(ctx context.Context, tp NoticeType, desc string, args ...inter
 }
 // CreateRepositoryNotice creates new system notice with type NoticeRepository.
-func CreateRepositoryNotice(desc string, args ...interface{}) error {
+func CreateRepositoryNotice(desc string, args ...any) error {
 // Note we use the db.DefaultContext here rather than passing in a context as the context may be cancelled
 return CreateNotice(db.DefaultContext, NoticeRepository, desc, args...)
 }

View file

@@ -21,10 +21,10 @@ const (
 modelsCommentTypeComment = 0
 )
-var consistencyCheckMap = make(map[string]func(t assert.TestingT, bean interface{}))
+var consistencyCheckMap = make(map[string]func(t assert.TestingT, bean any))
 // CheckConsistencyFor test that all matching database entries are consistent
-func CheckConsistencyFor(t assert.TestingT, beansToCheck ...interface{}) {
+func CheckConsistencyFor(t assert.TestingT, beansToCheck ...any) {
 for _, bean := range beansToCheck {
 sliceType := reflect.SliceOf(reflect.TypeOf(bean))
 sliceValue := reflect.MakeSlice(sliceType, 0, 10)
@@ -42,7 +42,7 @@ func CheckConsistencyFor(t assert.TestingT, beansToCheck ...interface{}) {
 }
 }
-func checkForConsistency(t assert.TestingT, bean interface{}) {
+func checkForConsistency(t assert.TestingT, bean any) {
 tb, err := db.TableInfo(bean)
 assert.NoError(t, err)
 f := consistencyCheckMap[tb.Name]
@@ -63,7 +63,7 @@ func init() {
 return i
 }
-checkForUserConsistency := func(t assert.TestingT, bean interface{}) {
+checkForUserConsistency := func(t assert.TestingT, bean any) {
 user := reflectionWrap(bean)
 AssertCountByCond(t, "repository", builder.Eq{"owner_id": user.int("ID")}, user.int("NumRepos"))
 AssertCountByCond(t, "star", builder.Eq{"uid": user.int("ID")}, user.int("NumStars"))
@@ -77,7 +77,7 @@ func init() {
 }
 }
-checkForRepoConsistency := func(t assert.TestingT, bean interface{}) {
+checkForRepoConsistency := func(t assert.TestingT, bean any) {
 repo := reflectionWrap(bean)
 assert.Equal(t, repo.str("LowerName"), strings.ToLower(repo.str("Name")), "repo: %+v", repo)
 AssertCountByCond(t, "star", builder.Eq{"repo_id": repo.int("ID")}, repo.int("NumStars"))
@@ -113,7 +113,7 @@ func init() {
 "Unexpected number of closed milestones for repo id: %d", repo.int("ID"))
 }
-checkForIssueConsistency := func(t assert.TestingT, bean interface{}) {
+checkForIssueConsistency := func(t assert.TestingT, bean any) {
 issue := reflectionWrap(bean)
 typeComment := modelsCommentTypeComment
 actual := GetCountByCond(t, "comment", builder.Eq{"`type`": typeComment, "issue_id": issue.int("ID")})
@@ -124,14 +124,14 @@ func init() {
 }
 }
-checkForPullRequestConsistency := func(t assert.TestingT, bean interface{}) {
+checkForPullRequestConsistency := func(t assert.TestingT, bean any) {
 pr := reflectionWrap(bean)
 issueRow := AssertExistsAndLoadMap(t, "issue", builder.Eq{"id": pr.int("IssueID")})
 assert.True(t, parseBool(issueRow["is_pull"]))
 assert.EqualValues(t, parseInt(issueRow["index"]), pr.int("Index"), "Unexpected index for pull request id: %d", pr.int("ID"))
 }
-checkForMilestoneConsistency := func(t assert.TestingT, bean interface{}) {
+checkForMilestoneConsistency := func(t assert.TestingT, bean any) {
 milestone := reflectionWrap(bean)
 AssertCountByCond(t, "issue", builder.Eq{"milestone_id": milestone.int("ID")}, milestone.int("NumIssues"))
@@ -145,7 +145,7 @@ func init() {
 assert.Equal(t, completeness, milestone.int("Completeness"))
 }
-checkForLabelConsistency := func(t assert.TestingT, bean interface{}) {
+checkForLabelConsistency := func(t assert.TestingT, bean any) {
 label := reflectionWrap(bean)
 issueLabels, err := db.GetEngine(db.DefaultContext).Table("issue_label").
 Where(builder.Eq{"label_id": label.int("ID")}).
@@ -166,13 +166,13 @@ func init() {
 assert.EqualValues(t, expected, label.int("NumClosedIssues"), "Unexpected number of closed issues for label id: %d", label.int("ID"))
 }
-checkForTeamConsistency := func(t assert.TestingT, bean interface{}) {
+checkForTeamConsistency := func(t assert.TestingT, bean any) {
 team := reflectionWrap(bean)
 AssertCountByCond(t, "team_user", builder.Eq{"team_id": team.int("ID")}, team.int("NumMembers"))
 AssertCountByCond(t, "team_repo", builder.Eq{"team_id": team.int("ID")}, team.int("NumRepos"))
 }
-checkForActionConsistency := func(t assert.TestingT, bean interface{}) {
+checkForActionConsistency := func(t assert.TestingT, bean any) {
 action := reflectionWrap(bean)
 if action.int("RepoID") != 1700 { // dangling intentional
 repoRow := AssertExistsAndLoadMap(t, "repository", builder.Eq{"id": action.int("RepoID")})

View file

@@ -23,7 +23,7 @@ type reflectionValue struct {
 v reflect.Value
 }
-func reflectionWrap(v interface{}) *reflectionValue {
+func reflectionWrap(v any) *reflectionValue {
 return &reflectionValue{v: reflect.ValueOf(v)}
 }

View file

@@ -37,7 +37,7 @@ func FixturesDir() string {
 return fixturesDir
 }
-func fatalTestError(fmtStr string, args ...interface{}) {
+func fatalTestError(fmtStr string, args ...any) {
 _, _ = fmt.Fprintf(os.Stderr, fmtStr, args...)
 os.Exit(1)
 }

View file

@@ -57,11 +57,11 @@ func (err ErrExternalLoginUserNotExist) Unwrap() error {
 // ExternalLoginUser makes the connecting between some existing user and additional external login sources
 type ExternalLoginUser struct {
 ExternalID string `xorm:"pk NOT NULL"`
 UserID int64 `xorm:"INDEX NOT NULL"`
 LoginSourceID int64 `xorm:"pk NOT NULL"`
-RawData map[string]interface{} `xorm:"TEXT JSON"`
+RawData map[string]any `xorm:"TEXT JSON"`
 Provider string `xorm:"index VARCHAR(25)"`
 Email string
 Name string
 FirstName string

View file

@@ -92,7 +92,7 @@ func (t *HookTask) AfterLoad() {
 }
 }
-func (t *HookTask) simpleMarshalJSON(v interface{}) string {
+func (t *HookTask) simpleMarshalJSON(v any) string {
 p, err := json.Marshal(v)
 if err != nil {
 log.Error("Marshal [%d]: %v", t.ID, err)

View file

@@ -21,7 +21,7 @@ import (
 )
 func init() {
-model.OnDecodeNodeError = func(node yaml.Node, out interface{}, err error) {
+model.OnDecodeNodeError = func(node yaml.Node, out any, err error) {
 // Log the error instead of panic or fatal.
 // It will be a big job to refactor act/pkg/model to return decode error,
 // so we just log the error and return empty value, and improve it later.

View file

@@ -107,7 +107,7 @@ const TimeLimitCodeLength = 12 + 6 + 40
 // CreateTimeLimitCode create a time limit code
 // code format: 12 length date time string + 6 minutes string + 40 sha1 encoded string
-func CreateTimeLimitCode(data string, minutes int, startInf interface{}) string {
+func CreateTimeLimitCode(data string, minutes int, startInf any) string {
 format := "200601021504"
 var start, end time.Time
@@ -245,7 +245,7 @@ func SetupGiteaRoot() string {
 }
 // FormatNumberSI format a number
-func FormatNumberSI(data interface{}) string {
+func FormatNumberSI(data any) string {
 var num int64
 if num1, ok := data.(int64); ok {
 num = num1

View file

@@ -24,7 +24,7 @@ type RedisCacher struct {
 }
 // toStr convert string/int/int64 interface to string. it's only used by the RedisCacher.Put internally
-func toStr(v interface{}) string {
+func toStr(v any) string {
 if v == nil {
 return ""
 }
@@ -44,7 +44,7 @@ func toStr(v interface{}) string {
 // Put puts value (string type) into cache with key and expire time.
 // If expired is 0, it lives forever.
-func (c *RedisCacher) Put(key string, val interface{}, expire int64) error {
+func (c *RedisCacher) Put(key string, val any, expire int64) error {
 // this function is not well-designed, it only puts string values into cache
 key = c.prefix + key
 if expire == 0 {
@@ -65,7 +65,7 @@ func (c *RedisCacher) Put(key string, val interface{}, expire int64) error {
 }
 // Get gets cached value by given key.
-func (c *RedisCacher) Get(key string) interface{} {
+func (c *RedisCacher) Get(key string) any {
 val, err := c.c.Get(graceful.GetManager().HammerContext(), c.prefix+key).Result()
 if err != nil {
 return nil

View file

@@ -30,7 +30,7 @@ type TwoQueueCacheConfig struct {
 // MemoryItem represents a memory cache item.
 type MemoryItem struct {
-Val interface{}
+Val any
 Created int64
 Timeout int64
 }
@@ -43,7 +43,7 @@ func (item *MemoryItem) hasExpired() bool {
 var _ mc.Cache = &TwoQueueCache{}
 // Put puts value into cache with key and expire time.
-func (c *TwoQueueCache) Put(key string, val interface{}, timeout int64) error {
+func (c *TwoQueueCache) Put(key string, val any, timeout int64) error {
 item := &MemoryItem{
 Val: val,
 Created: time.Now().Unix(),
@@ -56,7 +56,7 @@ func (c *TwoQueueCache) Put(key string, val interface{}, timeout int64) error {
 }
 // Get gets cached value by given key.
-func (c *TwoQueueCache) Get(key string) interface{} {
+func (c *TwoQueueCache) Get(key string) any {
 c.lock.Lock()
 defer c.lock.Unlock()
 cached, ok := c.cache.Get(key)
@@ -146,7 +146,7 @@ func (c *TwoQueueCache) Flush() error {
 return nil
 }
-func (c *TwoQueueCache) checkAndInvalidate(key interface{}) {
+func (c *TwoQueueCache) checkAndInvalidate(key any) {
 c.lock.Lock()
 defer c.lock.Unlock()
 cached, ok := c.cache.Peek(key)

View file

@@ -90,7 +90,7 @@ Usage: %[1]s [-v] [-o output.go] ambiguous.json
 sort.Slice(tables, func(i, j int) bool {
 return tables[i].Locale < tables[j].Locale
 })
-data := map[string]interface{}{
+data := map[string]any{
 "Tables": tables,
 }
@@ -99,7 +99,7 @@ Usage: %[1]s [-v] [-o output.go] ambiguous.json
 }
 }
-func runTemplate(t *template.Template, filename string, data interface{}) error {
+func runTemplate(t *template.Template, filename string, data any) error {
 buf := bytes.NewBuffer(nil)
 if err := t.Execute(buf, data); err != nil {
 return fmt.Errorf("unable to execute template: %w", err)
@@ -172,17 +172,17 @@ var AmbiguousCharacters = map[string]*AmbiguousTable{
 `))
-func logf(format string, args ...interface{}) {
+func logf(format string, args ...any) {
 fmt.Fprintf(os.Stderr, format+"\n", args...)
 }
-func verbosef(format string, args ...interface{}) {
+func verbosef(format string, args ...any) {
 if verbose {
 logf(format, args...)
 }
 }
-func fatalf(format string, args ...interface{}) {
+func fatalf(format string, args ...any) {
 logf("fatal: "+format+"\n", args...)
 os.Exit(1)
 }

View file

@@ -52,7 +52,7 @@ Usage: %[1]s [-v] [-o output.go]
 }
 }
-func runTemplate(t *template.Template, filename string, data interface{}) error {
+func runTemplate(t *template.Template, filename string, data any) error {
 buf := bytes.NewBuffer(nil)
 if err := t.Execute(buf, data); err != nil {
 return fmt.Errorf("unable to execute template: %w", err)
@@ -105,17 +105,17 @@ var InvisibleRanges = &unicode.RangeTable{
 }
 `))
-func logf(format string, args ...interface{}) {
+func logf(format string, args ...any) {
 fmt.Fprintf(os.Stderr, format+"\n", args...)
 }
-func verbosef(format string, args ...interface{}) {
+func verbosef(format string, args ...any) {
 if verbose {
 logf(format, args...)
 }
 }
-func fatalf(format string, args ...interface{}) {
+func fatalf(format string, args ...any) {
 logf("fatal: "+format+"\n", args...)
 os.Exit(1)
 }

View file

@@ -23,7 +23,7 @@ type routerLoggerOptions struct {
 Identity *string
 Start *time.Time
 ResponseWriter http.ResponseWriter
-Ctx map[string]interface{}
+Ctx map[string]any
 RequestID *string
 }
@@ -84,7 +84,7 @@ func AccessLogger() func(http.Handler) http.Handler {
 Identity: &identity,
 Start: &start,
 ResponseWriter: rw,
-Ctx: map[string]interface{}{
+Ctx: map[string]any{
 "RemoteAddr": req.RemoteAddr,
 "RemoteHost": reqHost,
 "Req": req,

View file

@@ -100,7 +100,7 @@ func (ctx *APIContext) ServerError(title string, err error) {
 // Error responds with an error message to client with given obj as the message.
 // If status is 500, also it prints error to log.
-func (ctx *APIContext) Error(status int, title string, obj interface{}) {
+func (ctx *APIContext) Error(status int, title string, obj any) {
 var message string
 if err, ok := obj.(error); ok {
 message = err.Error()
@@ -257,7 +257,7 @@ func APIContexter() func(http.Handler) http.Handler {
 // NotFound handles 404s for APIContext
 // String will replace message, errors will be added to a slice
-func (ctx *APIContext) NotFound(objs ...interface{}) {
+func (ctx *APIContext) NotFound(objs ...any) {
 message := ctx.Tr("error.not_found")
 var errors []string
 for _, obj := range objs {
@@ -273,7 +273,7 @@ func (ctx *APIContext) NotFound(objs ...interface{}) {
 }
 }
-ctx.JSON(http.StatusNotFound, map[string]interface{}{
+ctx.JSON(http.StatusNotFound, map[string]any{
 "message": message,
 "url": setting.API.SwaggerURL,
 "errors": errors,

View file

@@ -124,7 +124,7 @@ func (b *Base) Error(status int, contents ...string) {
 }
 // JSON render content as JSON
-func (b *Base) JSON(status int, content interface{}) {
+func (b *Base) JSON(status int, content any) {
 b.Resp.Header().Set("Content-Type", "application/json;charset=utf-8")
 b.Resp.WriteHeader(status)
 if err := json.NewEncoder(b.Resp).Encode(content); err != nil {

View file

@ -60,7 +60,7 @@ const (
// VerifyCaptcha verifies Captcha data // VerifyCaptcha verifies Captcha data
// No-op if captchas are not enabled // No-op if captchas are not enabled
func VerifyCaptcha(ctx *Context, tpl base.TplName, form interface{}) { func VerifyCaptcha(ctx *Context, tpl base.TplName, form any) {
if !setting.Service.EnableCaptcha { if !setting.Service.EnableCaptcha {
return return
} }

View file

@ -30,7 +30,7 @@ import (
// Render represents a template render // Render represents a template render
type Render interface { type Render interface {
TemplateLookup(tmpl string) (templates.TemplateExecutor, error) TemplateLookup(tmpl string) (templates.TemplateExecutor, error)
HTML(w io.Writer, status int, name string, data interface{}) error HTML(w io.Writer, status int, name string, data any) error
} }
// Context represents context of a request. // Context represents context of a request.
@ -61,7 +61,7 @@ type Context struct {
// TrHTMLEscapeArgs runs ".Locale.Tr()" but pre-escapes all arguments with html.EscapeString. // TrHTMLEscapeArgs runs ".Locale.Tr()" but pre-escapes all arguments with html.EscapeString.
// This is useful if the locale message is intended to only produce HTML content. // This is useful if the locale message is intended to only produce HTML content.
func (ctx *Context) TrHTMLEscapeArgs(msg string, args ...string) string { func (ctx *Context) TrHTMLEscapeArgs(msg string, args ...string) string {
trArgs := make([]interface{}, len(args)) trArgs := make([]any, len(args))
for i, arg := range args { for i, arg := range args {
trArgs[i] = html.EscapeString(arg) trArgs[i] = html.EscapeString(arg)
} }

View file

@ -97,14 +97,14 @@ func (ctx *Context) HTML(status int, name base.TplName) {
} }
// RenderToString renders the template content to a string // RenderToString renders the template content to a string
func (ctx *Context) RenderToString(name base.TplName, data map[string]interface{}) (string, error) { func (ctx *Context) RenderToString(name base.TplName, data map[string]any) (string, error) {
var buf strings.Builder var buf strings.Builder
err := ctx.Render.HTML(&buf, http.StatusOK, string(name), data) err := ctx.Render.HTML(&buf, http.StatusOK, string(name), data)
return buf.String(), err return buf.String(), err
} }
// RenderWithErr used for page has form validation but need to prompt error to users. // RenderWithErr used for page has form validation but need to prompt error to users.
func (ctx *Context) RenderWithErr(msg string, tpl base.TplName, form interface{}) { func (ctx *Context) RenderWithErr(msg string, tpl base.TplName, form any) {
if form != nil { if form != nil {
middleware.AssignForm(form, ctx.Data) middleware.AssignForm(form, ctx.Data)
} }

View file

@ -33,7 +33,7 @@ type packageAssignmentCtx struct {
// PackageAssignment returns a middleware to handle Context.Package assignment // PackageAssignment returns a middleware to handle Context.Package assignment
func PackageAssignment() func(ctx *Context) { func PackageAssignment() func(ctx *Context) {
return func(ctx *Context) { return func(ctx *Context) {
errorFn := func(status int, title string, obj interface{}) { errorFn := func(status int, title string, obj any) {
err, ok := obj.(error) err, ok := obj.(error)
if !ok { if !ok {
err = fmt.Errorf("%s", obj) err = fmt.Errorf("%s", obj)
@ -57,7 +57,7 @@ func PackageAssignmentAPI() func(ctx *APIContext) {
} }
} }
func packageAssignment(ctx *packageAssignmentCtx, errCb func(int, string, interface{})) *Package { func packageAssignment(ctx *packageAssignmentCtx, errCb func(int, string, any)) *Package {
pkg := &Package{ pkg := &Package{
Owner: ctx.ContextUser, Owner: ctx.ContextUser,
} }

View file

@ -32,7 +32,7 @@ func (p *Pagination) AddParam(ctx *Context, paramKey, ctxKey string) {
if !exists { if !exists {
return return
} }
paramData := fmt.Sprintf("%v", ctx.Data[ctxKey]) // cast interface{} to string paramData := fmt.Sprintf("%v", ctx.Data[ctxKey]) // cast any to string
urlParam := fmt.Sprintf("%s=%v", url.QueryEscape(paramKey), url.QueryEscape(paramData)) urlParam := fmt.Sprintf("%s=%v", url.QueryEscape(paramKey), url.QueryEscape(paramData))
p.urlParams = append(p.urlParams, urlParam) p.urlParams = append(p.urlParams, urlParam)
} }

View file

@ -90,7 +90,7 @@ func RequireRepoReaderOr(unitTypes ...unit.Type) func(ctx *Context) {
} }
if log.IsTrace() { if log.IsTrace() {
var format string var format string
var args []interface{} var args []any
if ctx.IsSigned { if ctx.IsSigned {
format = "Permission Denied: User %-v cannot read [" format = "Permission Denied: User %-v cannot read ["
args = append(args, ctx.Doer) args = append(args, ctx.Doer)

View file

@ -45,7 +45,7 @@ func (ctx *PrivateContext) Err() error {
return ctx.Base.Err() return ctx.Base.Err()
} }
var privateContextKey interface{} = "default_private_context" var privateContextKey any = "default_private_context"
// GetPrivateContext returns a context for Private routes // GetPrivateContext returns a context for Private routes
func GetPrivateContext(req *http.Request) *PrivateContext { func GetPrivateContext(req *http.Request) *PrivateContext {

View file

@ -51,8 +51,8 @@ func wrapNewlines(w io.Writer, prefix, value []byte) (sum int64, err error) {
type Event struct { type Event struct {
// Name represents the value of the event: tag in the stream // Name represents the value of the event: tag in the stream
Name string Name string
// Data is either JSONified []byte or interface{} that can be JSONd // Data is either JSONified []byte or any that can be JSONd
Data interface{} Data any
// ID represents the ID of an event // ID represents the ID of an event
ID string ID string
// Retry tells the receiver only to attempt to reconnect to the source after this time // Retry tells the receiver only to attempt to reconnect to the source after this time

View file

@ -177,7 +177,7 @@ func GetLastCommitForPaths(ctx context.Context, cache *LastCommitCache, c cgobje
refSha := c.ID().String() refSha := c.ID().String()
// We do a tree traversal with nodes sorted by commit time // We do a tree traversal with nodes sorted by commit time
heap := binaryheap.NewWith(func(a, b interface{}) int { heap := binaryheap.NewWith(func(a, b any) int {
if a.(*commitAndPaths).commit.CommitTime().Before(b.(*commitAndPaths).commit.CommitTime()) { if a.(*commitAndPaths).commit.CommitTime().Before(b.(*commitAndPaths).commit.CommitTime()) {
return 1 return 1
} }

View file

@ -217,7 +217,7 @@ func TestParser(t *testing.T) {
} }
} }
func pretty(v interface{}) string { func pretty(v any) string {
data, err := json.MarshalIndent(v, "", " ") data, err := json.MarshalIndent(v, "", " ")
if err != nil { if err != nil {
// shouldn't happen // shouldn't happen

View file

@ -114,7 +114,7 @@ func VersionInfo() string {
return "(git not found)" return "(git not found)"
} }
format := "%s" format := "%s"
args := []interface{}{gitVersion.Original()} args := []any{gitVersion.Original()}
// Since git wire protocol has been released from git v2.18 // Since git wire protocol has been released from git v2.18
if setting.Git.EnableAutoGitWireProtocol && CheckGitVersionAtLeast("2.18") == nil { if setting.Git.EnableAutoGitWireProtocol && CheckGitVersionAtLeast("2.18") == nil {
format += ", Wire Protocol %s Enabled" format += ", Wire Protocol %s Enabled"

View file

@ -15,9 +15,9 @@ import (
// Cache represents a caching interface // Cache represents a caching interface
type Cache interface { type Cache interface {
// Put puts value into cache with key and expire time. // Put puts value into cache with key and expire time.
Put(key string, val interface{}, timeout int64) error Put(key string, val any, timeout int64) error
// Get gets cached value by given key. // Get gets cached value by given key.
Get(key string) interface{} Get(key string) any
} }
func getCacheKey(repoPath, commitID, entryPath string) string { func getCacheKey(repoPath, commitID, entryPath string) string {

View file

@ -15,17 +15,17 @@ import (
// ObjectCache provides thread-safe cache operations. // ObjectCache provides thread-safe cache operations.
type ObjectCache struct { type ObjectCache struct {
lock sync.RWMutex lock sync.RWMutex
cache map[string]interface{} cache map[string]any
} }
func newObjectCache() *ObjectCache { func newObjectCache() *ObjectCache {
return &ObjectCache{ return &ObjectCache{
cache: make(map[string]interface{}, 10), cache: make(map[string]any, 10),
} }
} }
// Set add obj to cache // Set add obj to cache
func (oc *ObjectCache) Set(id string, obj interface{}) { func (oc *ObjectCache) Set(id string, obj any) {
oc.lock.Lock() oc.lock.Lock()
defer oc.lock.Unlock() defer oc.lock.Unlock()
@ -33,7 +33,7 @@ func (oc *ObjectCache) Set(id string, obj interface{}) {
} }
// Get get cached obj by id // Get get cached obj by id
func (oc *ObjectCache) Get(id string) (interface{}, bool) { func (oc *ObjectCache) Get(id string) (any, bool) {
oc.lock.RLock() oc.lock.RLock()
defer oc.lock.RUnlock() defer oc.lock.RUnlock()

View file

@ -283,7 +283,7 @@ func (g *Manager) Err() error {
} }
// Value allows the manager to be viewed as a context.Context done at Terminate // Value allows the manager to be viewed as a context.Context done at Terminate
func (g *Manager) Value(key interface{}) interface{} { func (g *Manager) Value(key any) any {
return g.managerCtx.Value(key) return g.managerCtx.Value(key)
} }

View file

@ -5,7 +5,7 @@ package html
// ParseSizeAndClass get size and class from string with default values // ParseSizeAndClass get size and class from string with default values
// If present, "others" expects the new size first and then the classes to use // If present, "others" expects the new size first and then the classes to use
func ParseSizeAndClass(defaultSize int, defaultClass string, others ...interface{}) (int, string) { func ParseSizeAndClass(defaultSize int, defaultClass string, others ...any) (int, string) {
if len(others) == 0 { if len(others) == 0 {
return defaultSize, defaultClass return defaultSize, defaultClass
} }

View file

@ -101,7 +101,7 @@ func (r *Request) Param(key, value string) *Request {
// Body adds request raw body. // Body adds request raw body.
// it supports string and []byte. // it supports string and []byte.
func (r *Request) Body(data interface{}) *Request { func (r *Request) Body(data any) *Request {
switch t := data.(type) { switch t := data.(type) {
case string: case string:
bf := bytes.NewBufferString(t) bf := bytes.NewBufferString(t)

View file

@ -27,7 +27,7 @@ func NewFlushingBatch(index bleve.Index, maxBatchSize int) *FlushingBatch {
} }
// Index add a new index to batch // Index add a new index to batch
func (b *FlushingBatch) Index(id string, data interface{}) error { func (b *FlushingBatch) Index(id string, data any) error {
if err := b.batch.Index(id, data); err != nil { if err := b.batch.Index(id, data); err != nil {
return err return err
} }

View file

@ -53,7 +53,7 @@ func numericEqualityQuery(value int64, field string) *query.NumericRangeQuery {
} }
func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error { func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error {
return m.AddCustomTokenFilter(unicodeNormalizeName, map[string]interface{}{ return m.AddCustomTokenFilter(unicodeNormalizeName, map[string]any{
"type": unicodenorm.Name, "type": unicodenorm.Name,
"form": unicodenorm.NFC, "form": unicodenorm.NFC,
}) })
@ -135,7 +135,7 @@ func createBleveIndexer(path string, latestVersion int) (bleve.Index, error) {
mapping := bleve.NewIndexMapping() mapping := bleve.NewIndexMapping()
if err := addUnicodeNormalizeTokenFilter(mapping); err != nil { if err := addUnicodeNormalizeTokenFilter(mapping); err != nil {
return nil, err return nil, err
} else if err := mapping.AddCustomAnalyzer(repoIndexerAnalyzer, map[string]interface{}{ } else if err := mapping.AddCustomAnalyzer(repoIndexerAnalyzer, map[string]any{
"type": analyzer_custom.Name, "type": analyzer_custom.Name,
"char_filters": []string{}, "char_filters": []string{},
"tokenizer": unicode.Name, "tokenizer": unicode.Name,

View file

@ -241,7 +241,7 @@ func (b *ElasticSearchIndexer) addUpdate(ctx context.Context, batchWriter git.Wr
elastic.NewBulkIndexRequest(). elastic.NewBulkIndexRequest().
Index(b.indexerAliasName). Index(b.indexerAliasName).
Id(id). Id(id).
Doc(map[string]interface{}{ Doc(map[string]any{
"repo_id": repo.ID, "repo_id": repo.ID,
"content": string(charset.ToUTF8DropErrors(fileContents)), "content": string(charset.ToUTF8DropErrors(fileContents)),
"commit_id": sha, "commit_id": sha,
@ -342,7 +342,7 @@ func convertResult(searchResult *elastic.SearchResult, kw string, pageSize int)
} }
repoID, fileName := parseIndexerID(hit.Id) repoID, fileName := parseIndexerID(hit.Id)
res := make(map[string]interface{}) res := make(map[string]any)
if err := json.Unmarshal(hit.Source, &res); err != nil { if err := json.Unmarshal(hit.Source, &res); err != nil {
return 0, nil, nil, err return 0, nil, nil, err
} }
@ -393,7 +393,7 @@ func (b *ElasticSearchIndexer) Search(ctx context.Context, repoIDs []int64, lang
query := elastic.NewBoolQuery() query := elastic.NewBoolQuery()
query = query.Must(kwQuery) query = query.Must(kwQuery)
if len(repoIDs) > 0 { if len(repoIDs) > 0 {
repoStrs := make([]interface{}, 0, len(repoIDs)) repoStrs := make([]any, 0, len(repoIDs))
for _, repoID := range repoIDs { for _, repoID := range repoIDs {
repoStrs = append(repoStrs, repoID) repoStrs = append(repoStrs, repoID)
} }

View file

@ -64,7 +64,7 @@ func newMatchPhraseQuery(matchPhrase, field, analyzer string) *query.MatchPhrase
const unicodeNormalizeName = "unicodeNormalize" const unicodeNormalizeName = "unicodeNormalize"
func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error { func addUnicodeNormalizeTokenFilter(m *mapping.IndexMappingImpl) error {
return m.AddCustomTokenFilter(unicodeNormalizeName, map[string]interface{}{ return m.AddCustomTokenFilter(unicodeNormalizeName, map[string]any{
"type": unicodenorm.Name, "type": unicodenorm.Name,
"form": unicodenorm.NFC, "form": unicodenorm.NFC,
}) })
@ -131,7 +131,7 @@ func createIssueIndexer(path string, latestVersion int) (bleve.Index, error) {
if err := addUnicodeNormalizeTokenFilter(mapping); err != nil { if err := addUnicodeNormalizeTokenFilter(mapping); err != nil {
return nil, err return nil, err
} else if err = mapping.AddCustomAnalyzer(issueIndexerAnalyzer, map[string]interface{}{ } else if err = mapping.AddCustomAnalyzer(issueIndexerAnalyzer, map[string]any{
"type": custom.Name, "type": custom.Name,
"char_filters": []string{}, "char_filters": []string{},
"tokenizer": unicode.Name, "tokenizer": unicode.Name,

View file

@ -140,7 +140,7 @@ func (b *ElasticSearchIndexer) Index(issues []*IndexerData) error {
_, err := b.client.Index(). _, err := b.client.Index().
Index(b.indexerName). Index(b.indexerName).
Id(fmt.Sprintf("%d", issue.ID)). Id(fmt.Sprintf("%d", issue.ID)).
BodyJson(map[string]interface{}{ BodyJson(map[string]any{
"id": issue.ID, "id": issue.ID,
"repo_id": issue.RepoID, "repo_id": issue.RepoID,
"title": issue.Title, "title": issue.Title,
@ -157,7 +157,7 @@ func (b *ElasticSearchIndexer) Index(issues []*IndexerData) error {
elastic.NewBulkIndexRequest(). elastic.NewBulkIndexRequest().
Index(b.indexerName). Index(b.indexerName).
Id(fmt.Sprintf("%d", issue.ID)). Id(fmt.Sprintf("%d", issue.ID)).
Doc(map[string]interface{}{ Doc(map[string]any{
"id": issue.ID, "id": issue.ID,
"repo_id": issue.RepoID, "repo_id": issue.RepoID,
"title": issue.Title, "title": issue.Title,
@ -209,7 +209,7 @@ func (b *ElasticSearchIndexer) Search(ctx context.Context, keyword string, repoI
query := elastic.NewBoolQuery() query := elastic.NewBoolQuery()
query = query.Must(kwQuery) query = query.Must(kwQuery)
if len(repoIDs) > 0 { if len(repoIDs) > 0 {
repoStrs := make([]interface{}, 0, len(repoIDs)) repoStrs := make([]any, 0, len(repoIDs))
for _, repoID := range repoIDs { for _, repoID := range repoIDs {
repoStrs = append(repoStrs, repoID) repoStrs = append(repoStrs, repoID)
} }

View file

@ -130,7 +130,7 @@ func (b *MeilisearchIndexer) Search(ctx context.Context, keyword string, repoIDs
hits := make([]Match, 0, len(searchRes.Hits)) hits := make([]Match, 0, len(searchRes.Hits))
for _, hit := range searchRes.Hits { for _, hit := range searchRes.Hits {
hits = append(hits, Match{ hits = append(hits, Match{
ID: int64(hit.(map[string]interface{})["id"].(float64)), ID: int64(hit.(map[string]any)["id"].(float64)),
}) })
} }
return &SearchResult{ return &SearchResult{

View file

@ -151,7 +151,7 @@ func validateOptions(field *api.IssueFormField, idx int) error {
} }
position := newErrorPosition(idx, field.Type) position := newErrorPosition(idx, field.Type)
options, ok := field.Attributes["options"].([]interface{}) options, ok := field.Attributes["options"].([]any)
if !ok || len(options) == 0 { if !ok || len(options) == 0 {
return position.Errorf("'options' is required and should be a array") return position.Errorf("'options' is required and should be a array")
} }
@ -164,7 +164,7 @@ func validateOptions(field *api.IssueFormField, idx int) error {
return position.Errorf("should be a string") return position.Errorf("should be a string")
} }
case api.IssueFormFieldTypeCheckboxes: case api.IssueFormFieldTypeCheckboxes:
opt, ok := option.(map[string]interface{}) opt, ok := option.(map[string]any)
if !ok { if !ok {
return position.Errorf("should be a dictionary") return position.Errorf("should be a dictionary")
} }
@ -182,7 +182,7 @@ func validateOptions(field *api.IssueFormField, idx int) error {
return nil return nil
} }
func validateStringItem(position errorPosition, m map[string]interface{}, required bool, names ...string) error { func validateStringItem(position errorPosition, m map[string]any, required bool, names ...string) error {
for _, name := range names { for _, name := range names {
v, ok := m[name] v, ok := m[name]
if !ok { if !ok {
@ -202,7 +202,7 @@ func validateStringItem(position errorPosition, m map[string]interface{}, requir
return nil return nil
} }
func validateBoolItem(position errorPosition, m map[string]interface{}, names ...string) error { func validateBoolItem(position errorPosition, m map[string]any, names ...string) error {
for _, name := range names { for _, name := range names {
v, ok := m[name] v, ok := m[name]
if !ok { if !ok {
@ -217,7 +217,7 @@ func validateBoolItem(position errorPosition, m map[string]interface{}, names ..
type errorPosition string type errorPosition string
func (p errorPosition) Errorf(format string, a ...interface{}) error { func (p errorPosition) Errorf(format string, a ...any) error {
return fmt.Errorf(string(p)+": "+format, a...) return fmt.Errorf(string(p)+": "+format, a...)
} }
@ -332,7 +332,7 @@ func (f *valuedField) Value() string {
} }
func (f *valuedField) Options() []*valuedOption { func (f *valuedField) Options() []*valuedOption {
if options, ok := f.Attributes["options"].([]interface{}); ok { if options, ok := f.Attributes["options"].([]any); ok {
ret := make([]*valuedOption, 0, len(options)) ret := make([]*valuedOption, 0, len(options))
for i, option := range options { for i, option := range options {
ret = append(ret, &valuedOption{ ret = append(ret, &valuedOption{
@ -348,7 +348,7 @@ func (f *valuedField) Options() []*valuedOption {
type valuedOption struct { type valuedOption struct {
index int index int
data interface{} data any
field *valuedField field *valuedField
} }
@ -359,7 +359,7 @@ func (o *valuedOption) Label() string {
return label return label
} }
case api.IssueFormFieldTypeCheckboxes: case api.IssueFormFieldTypeCheckboxes:
if vs, ok := o.data.(map[string]interface{}); ok { if vs, ok := o.data.(map[string]any); ok {
if v, ok := vs["label"].(string); ok { if v, ok := vs["label"].(string); ok {
return v return v
} }
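The assertions in `validateOptions` above operate on values the YAML decoder has already placed into `any` slots. A sketch of the concrete shapes involved, assuming `gopkg.in/yaml.v3` (illustrative only, not part of this patch):

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// yaml.v3 decodes untyped mappings into map[string]any and sequences
	// into []any, which is why the options list is asserted to []any and
	// each checkbox entry to map[string]any.
	src := `
options:
  - label: Option 1 of checkboxes
    required: true
  - label: Option 2 of checkboxes
    required: false
`
	var attrs map[string]any
	if err := yaml.Unmarshal([]byte(src), &attrs); err != nil {
		panic(err)
	}

	options, _ := attrs["options"].([]any)
	first, _ := options[0].(map[string]any)
	fmt.Println(len(options), first["label"], first["required"])
}
```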

View file

@ -387,34 +387,34 @@ body:
{ {
Type: "markdown", Type: "markdown",
ID: "id1", ID: "id1",
Attributes: map[string]interface{}{ Attributes: map[string]any{
"value": "Value of the markdown", "value": "Value of the markdown",
}, },
}, },
{ {
Type: "textarea", Type: "textarea",
ID: "id2", ID: "id2",
Attributes: map[string]interface{}{ Attributes: map[string]any{
"label": "Label of textarea", "label": "Label of textarea",
"description": "Description of textarea", "description": "Description of textarea",
"placeholder": "Placeholder of textarea", "placeholder": "Placeholder of textarea",
"value": "Value of textarea", "value": "Value of textarea",
"render": "bash", "render": "bash",
}, },
Validations: map[string]interface{}{ Validations: map[string]any{
"required": true, "required": true,
}, },
}, },
{ {
Type: "input", Type: "input",
ID: "id3", ID: "id3",
Attributes: map[string]interface{}{ Attributes: map[string]any{
"label": "Label of input", "label": "Label of input",
"description": "Description of input", "description": "Description of input",
"placeholder": "Placeholder of input", "placeholder": "Placeholder of input",
"value": "Value of input", "value": "Value of input",
}, },
Validations: map[string]interface{}{ Validations: map[string]any{
"required": true, "required": true,
"is_number": true, "is_number": true,
"regex": "[a-zA-Z0-9]+", "regex": "[a-zA-Z0-9]+",
@ -423,30 +423,30 @@ body:
{ {
Type: "dropdown", Type: "dropdown",
ID: "id4", ID: "id4",
Attributes: map[string]interface{}{ Attributes: map[string]any{
"label": "Label of dropdown", "label": "Label of dropdown",
"description": "Description of dropdown", "description": "Description of dropdown",
"multiple": true, "multiple": true,
"options": []interface{}{ "options": []any{
"Option 1 of dropdown", "Option 1 of dropdown",
"Option 2 of dropdown", "Option 2 of dropdown",
"Option 3 of dropdown", "Option 3 of dropdown",
}, },
}, },
Validations: map[string]interface{}{ Validations: map[string]any{
"required": true, "required": true,
}, },
}, },
{ {
Type: "checkboxes", Type: "checkboxes",
ID: "id5", ID: "id5",
Attributes: map[string]interface{}{ Attributes: map[string]any{
"label": "Label of checkboxes", "label": "Label of checkboxes",
"description": "Description of checkboxes", "description": "Description of checkboxes",
"options": []interface{}{ "options": []any{
map[string]interface{}{"label": "Option 1 of checkboxes", "required": true}, map[string]any{"label": "Option 1 of checkboxes", "required": true},
map[string]interface{}{"label": "Option 2 of checkboxes", "required": false}, map[string]any{"label": "Option 2 of checkboxes", "required": false},
map[string]interface{}{"label": "Option 3 of checkboxes", "required": true}, map[string]any{"label": "Option 3 of checkboxes", "required": true},
}, },
}, },
}, },
@ -479,7 +479,7 @@ body:
{ {
Type: "markdown", Type: "markdown",
ID: "id1", ID: "id1",
Attributes: map[string]interface{}{ Attributes: map[string]any{
"value": "Value of the markdown", "value": "Value of the markdown",
}, },
}, },
@ -512,7 +512,7 @@ body:
{ {
Type: "markdown", Type: "markdown",
ID: "id1", ID: "id1",
Attributes: map[string]interface{}{ Attributes: map[string]any{
"value": "Value of the markdown", "value": "Value of the markdown",
}, },
}, },
@ -545,7 +545,7 @@ body:
{ {
Type: "markdown", Type: "markdown",
ID: "id1", ID: "id1",
Attributes: map[string]interface{}{ Attributes: map[string]any{
"value": "Value of the markdown", "value": "Value of the markdown",
}, },
}, },

View file

@ -15,18 +15,18 @@ import (
// Encoder represents an encoder for json // Encoder represents an encoder for json
type Encoder interface { type Encoder interface {
Encode(v interface{}) error Encode(v any) error
} }
// Decoder represents a decoder for json // Decoder represents a decoder for json
type Decoder interface { type Decoder interface {
Decode(v interface{}) error Decode(v any) error
} }
// Interface represents an interface to handle json data // Interface represents an interface to handle json data
type Interface interface { type Interface interface {
Marshal(v interface{}) ([]byte, error) Marshal(v any) ([]byte, error)
Unmarshal(data []byte, v interface{}) error Unmarshal(data []byte, v any) error
NewEncoder(writer io.Writer) Encoder NewEncoder(writer io.Writer) Encoder
NewDecoder(reader io.Reader) Decoder NewDecoder(reader io.Reader) Decoder
Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error
@ -44,12 +44,12 @@ var (
type StdJSON struct{} type StdJSON struct{}
// Marshal implements Interface // Marshal implements Interface
func (StdJSON) Marshal(v interface{}) ([]byte, error) { func (StdJSON) Marshal(v any) ([]byte, error) {
return json.Marshal(v) return json.Marshal(v)
} }
// Unmarshal implements Interface // Unmarshal implements Interface
func (StdJSON) Unmarshal(data []byte, v interface{}) error { func (StdJSON) Unmarshal(data []byte, v any) error {
return json.Unmarshal(data, v) return json.Unmarshal(data, v)
} }
@ -74,12 +74,12 @@ type JSONiter struct {
} }
// Marshal implements Interface // Marshal implements Interface
func (j JSONiter) Marshal(v interface{}) ([]byte, error) { func (j JSONiter) Marshal(v any) ([]byte, error) {
return j.API.Marshal(v) return j.API.Marshal(v)
} }
// Unmarshal implements Interface // Unmarshal implements Interface
func (j JSONiter) Unmarshal(data []byte, v interface{}) error { func (j JSONiter) Unmarshal(data []byte, v any) error {
return j.API.Unmarshal(data, v) return j.API.Unmarshal(data, v)
} }
@ -99,12 +99,12 @@ func (j JSONiter) Indent(dst *bytes.Buffer, src []byte, prefix, indent string) e
} }
// Marshal converts object as bytes // Marshal converts object as bytes
func Marshal(v interface{}) ([]byte, error) { func Marshal(v any) ([]byte, error) {
return DefaultJSONHandler.Marshal(v) return DefaultJSONHandler.Marshal(v)
} }
// Unmarshal decodes object from bytes // Unmarshal decodes object from bytes
func Unmarshal(data []byte, v interface{}) error { func Unmarshal(data []byte, v any) error {
return DefaultJSONHandler.Unmarshal(data, v) return DefaultJSONHandler.Unmarshal(data, v)
} }
@ -124,7 +124,7 @@ func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
} }
// MarshalIndent copied from encoding/json // MarshalIndent copied from encoding/json
func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) { func MarshalIndent(v any, prefix, indent string) ([]byte, error) {
b, err := Marshal(v) b, err := Marshal(v)
if err != nil { if err != nil {
return nil, err return nil, err
@ -144,7 +144,7 @@ func Valid(data []byte) bool {
// UnmarshalHandleDoubleEncode - due to a bug in xorm (see https://gitea.com/xorm/xorm/pulls/1957) - it's // UnmarshalHandleDoubleEncode - due to a bug in xorm (see https://gitea.com/xorm/xorm/pulls/1957) - it's
// possible that a Blob may be double encoded or gain an unwanted prefix of 0xff 0xfe. // possible that a Blob may be double encoded or gain an unwanted prefix of 0xff 0xfe.
func UnmarshalHandleDoubleEncode(bs []byte, v interface{}) error { func UnmarshalHandleDoubleEncode(bs []byte, v any) error {
err := json.Unmarshal(bs, v) err := json.Unmarshal(bs, v)
if err != nil { if err != nil {
ok := true ok := true
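Related to the `Marshal`/`Unmarshal` signatures above: decoding arbitrary JSON into an `any` value produces the usual untyped shapes (`map[string]any`, `[]any`, `string`, `float64`, `bool`, `nil`), so the type assertions scattered through this diff behave exactly as they did with `interface{}`. A standard-library-only sketch:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Unmarshal into *any: objects become map[string]any, arrays []any,
	// and numbers float64 -- identical to the pre-rename behaviour.
	var v any
	if err := json.Unmarshal([]byte(`{"id": 3, "tags": ["a", "b"]}`), &v); err != nil {
		panic(err)
	}
	obj := v.(map[string]any)
	tags := obj["tags"].([]any)
	fmt.Println(obj["id"], len(tags)) // 3 2
}
```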

View file

@ -120,7 +120,7 @@ func TestRender_IssueIndexPattern2(t *testing.T) {
isExternal = true isExternal = true
} }
links := make([]interface{}, len(indices)) links := make([]any, len(indices))
for i, index := range indices { for i, index := range indices {
links[i] = numericIssueLink(util.URLJoin(TestRepoURL, path), "ref-issue", index, marker) links[i] = numericIssueLink(util.URLJoin(TestRepoURL, path), "ref-issue", index, marker)
} }
@ -204,7 +204,7 @@ func TestRender_IssueIndexPattern4(t *testing.T) {
// alphanumeric: render inputs with valid mentions // alphanumeric: render inputs with valid mentions
test := func(s, expectedFmt string, names ...string) { test := func(s, expectedFmt string, names ...string) {
links := make([]interface{}, len(names)) links := make([]any, len(names))
for i, name := range names { for i, name := range names {
links[i] = externalIssueLink("https://someurl.com/someUser/someRepo/", "ref-issue ref-external-issue", name) links[i] = externalIssueLink("https://someurl.com/someUser/someRepo/", "ref-issue ref-external-issue", name)
} }
@ -226,7 +226,7 @@ func TestRender_IssueIndexPattern5(t *testing.T) {
test := func(s, expectedFmt, pattern string, ids, names []string) { test := func(s, expectedFmt, pattern string, ids, names []string) {
metas := regexpMetas metas := regexpMetas
metas["regexp"] = pattern metas["regexp"] = pattern
links := make([]interface{}, len(ids)) links := make([]any, len(ids))
for i, id := range ids { for i, id := range ids {
links[i] = link(util.URLJoin("https://someurl.com/someUser/someRepo/", id), "ref-issue ref-external-issue", names[i]) links[i] = link(util.URLJoin("https://someurl.com/someUser/someRepo/", id), "ref-issue ref-external-issue", names[i])
} }

View file

@ -55,14 +55,14 @@ func isYAMLSeparator(line []byte) bool {
// ExtractMetadata consumes a markdown file, parses YAML frontmatter, // ExtractMetadata consumes a markdown file, parses YAML frontmatter,
// and returns the frontmatter metadata separated from the markdown content // and returns the frontmatter metadata separated from the markdown content
func ExtractMetadata(contents string, out interface{}) (string, error) { func ExtractMetadata(contents string, out any) (string, error) {
body, err := ExtractMetadataBytes([]byte(contents), out) body, err := ExtractMetadataBytes([]byte(contents), out)
return string(body), err return string(body), err
} }
// ExtractMetadata consumes a markdown file, parses YAML frontmatter, // ExtractMetadata consumes a markdown file, parses YAML frontmatter,
// and returns the frontmatter metadata separated from the markdown content // and returns the frontmatter metadata separated from the markdown content
func ExtractMetadataBytes(contents []byte, out interface{}) ([]byte, error) { func ExtractMetadataBytes(contents []byte, out any) ([]byte, error) {
var front, body []byte var front, body []byte
start, end := 0, len(contents) start, end := 0, len(contents)

View file

@ -24,7 +24,7 @@ type Comment struct {
Updated time.Time Updated time.Time
Content string Content string
Reactions []*Reaction Reactions []*Reaction
Meta map[string]interface{} `yaml:"meta,omitempty"` // see models/issues/comment.go for fields in Comment struct Meta map[string]any `yaml:"meta,omitempty"` // see models/issues/comment.go for fields in Comment struct
} }
// GetExternalName ExternalUserMigrated interface // GetExternalName ExternalUserMigrated interface

View file

@ -34,4 +34,4 @@ type DownloaderFactory interface {
} }
// DownloaderContext has opaque information only relevant to a given downloader // DownloaderContext has opaque information only relevant to a given downloader
type DownloaderContext interface{} type DownloaderContext any

View file

@ -17,7 +17,7 @@ import (
) )
// Load project data from file, with optional validation // Load project data from file, with optional validation
func Load(filename string, data interface{}, validation bool) error { func Load(filename string, data any, validation bool) error {
isJSON := strings.HasSuffix(filename, ".json") isJSON := strings.HasSuffix(filename, ".json")
bs, err := os.ReadFile(filename) bs, err := os.ReadFile(filename)
@ -34,7 +34,7 @@ func Load(filename string, data interface{}, validation bool) error {
return unmarshal(bs, data, isJSON) return unmarshal(bs, data, isJSON)
} }
func unmarshal(bs []byte, data interface{}, isJSON bool) error { func unmarshal(bs []byte, data any, isJSON bool) error {
if isJSON { if isJSON {
return json.Unmarshal(bs, data) return json.Unmarshal(bs, data)
} }
@ -47,8 +47,8 @@ func getSchema(filename string) (*jsonschema.Schema, error) {
return c.Compile(filename) return c.Compile(filename)
} }
func validate(bs []byte, datatype interface{}, isJSON bool) error { func validate(bs []byte, datatype any, isJSON bool) error {
var v interface{} var v any
err := unmarshal(bs, &v, isJSON) err := unmarshal(bs, &v, isJSON)
if err != nil { if err != nil {
return err return err
@ -81,11 +81,11 @@ func validate(bs []byte, datatype interface{}, isJSON bool) error {
return err return err
} }
func toStringKeys(val interface{}) (interface{}, error) { func toStringKeys(val any) (any, error) {
var err error var err error
switch val := val.(type) { switch val := val.(type) {
case map[string]interface{}: case map[string]any:
m := make(map[string]interface{}) m := make(map[string]any)
for k, v := range val { for k, v := range val {
m[k], err = toStringKeys(v) m[k], err = toStringKeys(v)
if err != nil { if err != nil {
@ -93,8 +93,8 @@ func toStringKeys(val interface{}) (interface{}, error) {
} }
} }
return m, nil return m, nil
case []interface{}: case []any:
l := make([]interface{}, len(val)) l := make([]any, len(val))
for i, v := range val { for i, v := range val {
l[i], err = toStringKeys(v) l[i], err = toStringKeys(v)
if err != nil { if err != nil {

View file

@ -4,7 +4,7 @@
package migration package migration
// Messenger is a formatting function similar to i18n.Tr // Messenger is a formatting function similar to i18n.Tr
type Messenger func(key string, args ...interface{}) type Messenger func(key string, args ...any)
// NilMessenger represents an empty formatting function // NilMessenger represents an empty formatting function
func NilMessenger(string, ...interface{}) {} func NilMessenger(string, ...any) {}

View file

@ -54,7 +54,7 @@ func (m *Manager) GetLevelDB(connection string) (db *leveldb.DB, err error) {
// Because we want associate any goroutines created by this call to the main nosqldb context we need to // Because we want associate any goroutines created by this call to the main nosqldb context we need to
// wrap this in a goroutine labelled with the nosqldb context // wrap this in a goroutine labelled with the nosqldb context
done := make(chan struct{}) done := make(chan struct{})
var recovered interface{} var recovered any
go func() { go func() {
defer func() { defer func() {
recovered = recover() recovered = recover()

View file

@ -47,7 +47,7 @@ func (m *Manager) GetRedisClient(connection string) (client redis.UniversalClien
// Because we want associate any goroutines created by this call to the main nosqldb context we need to // Because we want associate any goroutines created by this call to the main nosqldb context we need to
// wrap this in a goroutine labelled with the nosqldb context // wrap this in a goroutine labelled with the nosqldb context
done := make(chan struct{}) done := make(chan struct{})
var recovered interface{} var recovered any
go func() { go func() {
defer func() { defer func() {
recovered = recover() recovered = recover()

View file

@ -38,18 +38,18 @@ type Package struct {
// Metadata represents the metadata of a Composer package // Metadata represents the metadata of a Composer package
type Metadata struct { type Metadata struct {
Description string `json:"description,omitempty"` Description string `json:"description,omitempty"`
Keywords []string `json:"keywords,omitempty"` Keywords []string `json:"keywords,omitempty"`
Homepage string `json:"homepage,omitempty"` Homepage string `json:"homepage,omitempty"`
License Licenses `json:"license,omitempty"` License Licenses `json:"license,omitempty"`
Authors []Author `json:"authors,omitempty"` Authors []Author `json:"authors,omitempty"`
Autoload map[string]interface{} `json:"autoload,omitempty"` Autoload map[string]any `json:"autoload,omitempty"`
AutoloadDev map[string]interface{} `json:"autoload-dev,omitempty"` AutoloadDev map[string]any `json:"autoload-dev,omitempty"`
Extra map[string]interface{} `json:"extra,omitempty"` Extra map[string]any `json:"extra,omitempty"`
Require map[string]string `json:"require,omitempty"` Require map[string]string `json:"require,omitempty"`
RequireDev map[string]string `json:"require-dev,omitempty"` RequireDev map[string]string `json:"require-dev,omitempty"`
Suggest map[string]string `json:"suggest,omitempty"` Suggest map[string]string `json:"suggest,omitempty"`
Provide map[string]string `json:"provide,omitempty"` Provide map[string]string `json:"provide,omitempty"`
} }
// Licenses represents the licenses of a Composer package // Licenses represents the licenses of a Composer package

View file

@ -55,14 +55,14 @@ type Maintainer struct {
} }
type Dependency struct { type Dependency struct {
Name string `json:"name" yaml:"name"` Name string `json:"name" yaml:"name"`
Version string `json:"version,omitempty" yaml:"version,omitempty"` Version string `json:"version,omitempty" yaml:"version,omitempty"`
Repository string `json:"repository" yaml:"repository"` Repository string `json:"repository" yaml:"repository"`
Condition string `json:"condition,omitempty" yaml:"condition,omitempty"` Condition string `json:"condition,omitempty" yaml:"condition,omitempty"`
Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"` Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"`
Enabled bool `json:"enabled,omitempty" yaml:"enabled,omitempty"` Enabled bool `json:"enabled,omitempty" yaml:"enabled,omitempty"`
ImportValues []interface{} `json:"import_values,omitempty" yaml:"import-values,omitempty"` ImportValues []any `json:"import_values,omitempty" yaml:"import-values,omitempty"`
Alias string `json:"alias,omitempty" yaml:"alias,omitempty"` Alias string `json:"alias,omitempty" yaml:"alias,omitempty"`
} }
// ParseChartArchive parses the metadata of a Helm archive // ParseChartArchive parses the metadata of a Helm archive

View file

@ -38,12 +38,12 @@ type Package struct {
// Metadata represents the metadata of a Pub package // Metadata represents the metadata of a Pub package
type Metadata struct { type Metadata struct {
Description string `json:"description,omitempty"` Description string `json:"description,omitempty"`
ProjectURL string `json:"project_url,omitempty"` ProjectURL string `json:"project_url,omitempty"`
RepositoryURL string `json:"repository_url,omitempty"` RepositoryURL string `json:"repository_url,omitempty"`
DocumentationURL string `json:"documentation_url,omitempty"` DocumentationURL string `json:"documentation_url,omitempty"`
Readme string `json:"readme,omitempty"` Readme string `json:"readme,omitempty"`
Pubspec interface{} `json:"pubspec"` Pubspec any `json:"pubspec"`
} }
type pubspecPackage struct { type pubspecPackage struct {
@ -134,7 +134,7 @@ func ParsePubspecMetadata(r io.Reader) (*Package, error) {
p.Repository = "" p.Repository = ""
} }
var pubspec interface{} var pubspec any
if err := yaml.Unmarshal(buf, &pubspec); err != nil { if err := yaml.Unmarshal(buf, &pubspec); err != nil {
return nil, err return nil, err
} }

View file

@ -40,19 +40,19 @@ var (
// RubyUserMarshal is a Ruby object that has a marshal_load function. // RubyUserMarshal is a Ruby object that has a marshal_load function.
type RubyUserMarshal struct { type RubyUserMarshal struct {
Name string Name string
Value interface{} Value any
} }
// RubyUserDef is a Ruby object that has a _load function. // RubyUserDef is a Ruby object that has a _load function.
type RubyUserDef struct { type RubyUserDef struct {
Name string Name string
Value interface{} Value any
} }
// RubyObject is a default Ruby object. // RubyObject is a default Ruby object.
type RubyObject struct { type RubyObject struct {
Name string Name string
Member map[string]interface{} Member map[string]any
} }
// MarshalEncoder mimics Rubys Marshal class. // MarshalEncoder mimics Rubys Marshal class.
@ -71,7 +71,7 @@ func NewMarshalEncoder(w io.Writer) *MarshalEncoder {
} }
// Encode encodes the given type // Encode encodes the given type
func (e *MarshalEncoder) Encode(v interface{}) error { func (e *MarshalEncoder) Encode(v any) error {
if _, err := e.w.Write([]byte{majorVersion, minorVersion}); err != nil { if _, err := e.w.Write([]byte{majorVersion, minorVersion}); err != nil {
return err return err
} }
@ -83,7 +83,7 @@ func (e *MarshalEncoder) Encode(v interface{}) error {
return e.w.Flush() return e.w.Flush()
} }
func (e *MarshalEncoder) marshal(v interface{}) error { func (e *MarshalEncoder) marshal(v any) error {
if v == nil { if v == nil {
return e.marshalNil() return e.marshalNil()
} }

View file

@ -12,7 +12,7 @@ import (
func TestMinimalEncoder(t *testing.T) { func TestMinimalEncoder(t *testing.T) {
cases := []struct { cases := []struct {
Value interface{} Value any
Expected []byte Expected []byte
Error error Error error
}{ }{
@ -73,7 +73,7 @@ func TestMinimalEncoder(t *testing.T) {
{ {
Value: &RubyObject{ Value: &RubyObject{
Name: "Test", Name: "Test",
Member: map[string]interface{}{ Member: map[string]any{
"test": 4, "test": 4,
}, },
}, },

View file

@ -65,12 +65,12 @@ type gemspec struct {
Version struct { Version struct {
Version string `yaml:"version"` Version string `yaml:"version"`
} `yaml:"version"` } `yaml:"version"`
Platform string `yaml:"platform"` Platform string `yaml:"platform"`
Authors []string `yaml:"authors"` Authors []string `yaml:"authors"`
Autorequire interface{} `yaml:"autorequire"` Autorequire any `yaml:"autorequire"`
Bindir string `yaml:"bindir"` Bindir string `yaml:"bindir"`
CertChain []interface{} `yaml:"cert_chain"` CertChain []any `yaml:"cert_chain"`
Date string `yaml:"date"` Date string `yaml:"date"`
Dependencies []struct { Dependencies []struct {
Name string `yaml:"name"` Name string `yaml:"name"`
Requirement requirement `yaml:"requirement"` Requirement requirement `yaml:"requirement"`
@ -78,34 +78,34 @@ type gemspec struct {
Prerelease bool `yaml:"prerelease"` Prerelease bool `yaml:"prerelease"`
VersionRequirements requirement `yaml:"version_requirements"` VersionRequirements requirement `yaml:"version_requirements"`
} `yaml:"dependencies"` } `yaml:"dependencies"`
Description string `yaml:"description"` Description string `yaml:"description"`
Executables []string `yaml:"executables"` Executables []string `yaml:"executables"`
Extensions []interface{} `yaml:"extensions"` Extensions []any `yaml:"extensions"`
ExtraRdocFiles []string `yaml:"extra_rdoc_files"` ExtraRdocFiles []string `yaml:"extra_rdoc_files"`
Files []string `yaml:"files"` Files []string `yaml:"files"`
Homepage string `yaml:"homepage"` Homepage string `yaml:"homepage"`
Licenses []string `yaml:"licenses"` Licenses []string `yaml:"licenses"`
Metadata struct { Metadata struct {
BugTrackerURI string `yaml:"bug_tracker_uri"` BugTrackerURI string `yaml:"bug_tracker_uri"`
ChangelogURI string `yaml:"changelog_uri"` ChangelogURI string `yaml:"changelog_uri"`
DocumentationURI string `yaml:"documentation_uri"` DocumentationURI string `yaml:"documentation_uri"`
SourceCodeURI string `yaml:"source_code_uri"` SourceCodeURI string `yaml:"source_code_uri"`
} `yaml:"metadata"` } `yaml:"metadata"`
PostInstallMessage interface{} `yaml:"post_install_message"` PostInstallMessage any `yaml:"post_install_message"`
RdocOptions []interface{} `yaml:"rdoc_options"` RdocOptions []any `yaml:"rdoc_options"`
RequirePaths []string `yaml:"require_paths"` RequirePaths []string `yaml:"require_paths"`
RequiredRubyVersion requirement `yaml:"required_ruby_version"` RequiredRubyVersion requirement `yaml:"required_ruby_version"`
RequiredRubygemsVersion requirement `yaml:"required_rubygems_version"` RequiredRubygemsVersion requirement `yaml:"required_rubygems_version"`
Requirements []interface{} `yaml:"requirements"` Requirements []any `yaml:"requirements"`
RubygemsVersion string `yaml:"rubygems_version"` RubygemsVersion string `yaml:"rubygems_version"`
SigningKey interface{} `yaml:"signing_key"` SigningKey any `yaml:"signing_key"`
SpecificationVersion int `yaml:"specification_version"` SpecificationVersion int `yaml:"specification_version"`
Summary string `yaml:"summary"` Summary string `yaml:"summary"`
TestFiles []interface{} `yaml:"test_files"` TestFiles []any `yaml:"test_files"`
} }
type requirement struct { type requirement struct {
Requirements [][]interface{} `yaml:"requirements"` Requirements [][]any `yaml:"requirements"`
} }
// AsVersionRequirement converts into []VersionRequirement // AsVersionRequirement converts into []VersionRequirement
@ -119,7 +119,7 @@ func (r requirement) AsVersionRequirement() []VersionRequirement {
if !ok { if !ok {
continue continue
} }
vm, ok := req[1].(map[string]interface{}) vm, ok := req[1].(map[string]any)
if !ok { if !ok {
continue continue
} }

View file

@ -85,11 +85,11 @@ type LoggerOptions struct {
Logger string Logger string
Writer string Writer string
Mode string Mode string
Config map[string]interface{} Config map[string]any
} }
// AddLogger adds a logger // AddLogger adds a logger
func AddLogger(ctx context.Context, logger, writer, mode string, config map[string]interface{}) ResponseExtra { func AddLogger(ctx context.Context, logger, writer, mode string, config map[string]any) ResponseExtra {
reqURL := setting.LocalURL + "api/internal/manager/add-logger" reqURL := setting.LocalURL + "api/internal/manager/add-logger"
req := newInternalRequest(ctx, reqURL, "POST", LoggerOptions{ req := newInternalRequest(ctx, reqURL, "POST", LoggerOptions{
Logger: logger, Logger: logger,

View file

@ -24,7 +24,7 @@ func (c *Context) GetParent() *Context {
} }
// Value is part of the interface for context.Context. We mostly defer to the internal context - but we return this in response to the ProcessContextKey // Value is part of the interface for context.Context. We mostly defer to the internal context - but we return this in response to the ProcessContextKey
func (c *Context) Value(key interface{}) interface{} { func (c *Context) Value(key any) any {
if key == ProcessContextKey { if key == ProcessContextKey {
return c return c
} }
@ -32,7 +32,7 @@ func (c *Context) Value(key interface{}) interface{} {
} }
// ProcessContextKey is the key under which process contexts are stored // ProcessContextKey is the key under which process contexts are stored
var ProcessContextKey interface{} = "process-context" var ProcessContextKey any = "process-context"
// GetContext will return a process context if one exists // GetContext will return a process context if one exists
func GetContext(ctx context.Context) *Context { func GetContext(ctx context.Context) *Context {

View file

@ -17,11 +17,11 @@ import (
type DBStore struct { type DBStore struct {
sid string sid string
lock sync.RWMutex lock sync.RWMutex
data map[interface{}]interface{} data map[any]any
} }
// NewDBStore creates and returns a DB session store. // NewDBStore creates and returns a DB session store.
func NewDBStore(sid string, kv map[interface{}]interface{}) *DBStore { func NewDBStore(sid string, kv map[any]any) *DBStore {
return &DBStore{ return &DBStore{
sid: sid, sid: sid,
data: kv, data: kv,
@ -29,7 +29,7 @@ func NewDBStore(sid string, kv map[interface{}]interface{}) *DBStore {
} }
// Set sets value to given key in session. // Set sets value to given key in session.
func (s *DBStore) Set(key, val interface{}) error { func (s *DBStore) Set(key, val any) error {
s.lock.Lock() s.lock.Lock()
defer s.lock.Unlock() defer s.lock.Unlock()
@ -38,7 +38,7 @@ func (s *DBStore) Set(key, val interface{}) error {
} }
// Get gets value by given key in session. // Get gets value by given key in session.
func (s *DBStore) Get(key interface{}) interface{} { func (s *DBStore) Get(key any) any {
s.lock.RLock() s.lock.RLock()
defer s.lock.RUnlock() defer s.lock.RUnlock()
@ -46,7 +46,7 @@ func (s *DBStore) Get(key interface{}) interface{} {
} }
// Delete delete a key from session. // Delete delete a key from session.
func (s *DBStore) Delete(key interface{}) error { func (s *DBStore) Delete(key any) error {
s.lock.Lock() s.lock.Lock()
defer s.lock.Unlock() defer s.lock.Unlock()
@ -79,7 +79,7 @@ func (s *DBStore) Flush() error {
s.lock.Lock() s.lock.Lock()
defer s.lock.Unlock() defer s.lock.Unlock()
s.data = make(map[interface{}]interface{}) s.data = make(map[any]any)
return nil return nil
} }
@ -102,9 +102,9 @@ func (p *DBProvider) Read(sid string) (session.RawStore, error) {
return nil, err return nil, err
} }
var kv map[interface{}]interface{} var kv map[any]any
if len(s.Data) == 0 || s.Expiry.Add(p.maxLifetime) <= timeutil.TimeStampNow() { if len(s.Data) == 0 || s.Expiry.Add(p.maxLifetime) <= timeutil.TimeStampNow() {
kv = make(map[interface{}]interface{}) kv = make(map[any]any)
} else { } else {
kv, err = session.DecodeGob(s.Data) kv, err = session.DecodeGob(s.Data)
if err != nil { if err != nil {
@ -136,9 +136,9 @@ func (p *DBProvider) Regenerate(oldsid, sid string) (_ session.RawStore, err err
return nil, err return nil, err
} }
var kv map[interface{}]interface{} var kv map[any]any
if len(s.Data) == 0 || s.Expiry.Add(p.maxLifetime) <= timeutil.TimeStampNow() { if len(s.Data) == 0 || s.Expiry.Add(p.maxLifetime) <= timeutil.TimeStampNow() {
kv = make(map[interface{}]interface{}) kv = make(map[any]any)
} else { } else {
kv, err = session.DecodeGob(s.Data) kv, err = session.DecodeGob(s.Data)
if err != nil { if err != nil {

View file

@ -35,11 +35,11 @@ type RedisStore struct {
prefix, sid string prefix, sid string
duration time.Duration duration time.Duration
lock sync.RWMutex lock sync.RWMutex
data map[interface{}]interface{} data map[any]any
} }
// NewRedisStore creates and returns a redis session store. // NewRedisStore creates and returns a redis session store.
func NewRedisStore(c redis.UniversalClient, prefix, sid string, dur time.Duration, kv map[interface{}]interface{}) *RedisStore { func NewRedisStore(c redis.UniversalClient, prefix, sid string, dur time.Duration, kv map[any]any) *RedisStore {
return &RedisStore{ return &RedisStore{
c: c, c: c,
prefix: prefix, prefix: prefix,
@ -50,7 +50,7 @@ func NewRedisStore(c redis.UniversalClient, prefix, sid string, dur time.Duratio
} }
// Set sets value to given key in session. // Set sets value to given key in session.
func (s *RedisStore) Set(key, val interface{}) error { func (s *RedisStore) Set(key, val any) error {
s.lock.Lock() s.lock.Lock()
defer s.lock.Unlock() defer s.lock.Unlock()
@ -59,7 +59,7 @@ func (s *RedisStore) Set(key, val interface{}) error {
} }
// Get gets value by given key in session. // Get gets value by given key in session.
func (s *RedisStore) Get(key interface{}) interface{} { func (s *RedisStore) Get(key any) any {
s.lock.RLock() s.lock.RLock()
defer s.lock.RUnlock() defer s.lock.RUnlock()
@ -67,7 +67,7 @@ func (s *RedisStore) Get(key interface{}) interface{} {
} }
// Delete delete a key from session. // Delete delete a key from session.
func (s *RedisStore) Delete(key interface{}) error { func (s *RedisStore) Delete(key any) error {
s.lock.Lock() s.lock.Lock()
defer s.lock.Unlock() defer s.lock.Unlock()
@ -100,7 +100,7 @@ func (s *RedisStore) Flush() error {
s.lock.Lock() s.lock.Lock()
defer s.lock.Unlock() defer s.lock.Unlock()
s.data = make(map[interface{}]interface{}) s.data = make(map[any]any)
return nil return nil
} }
@ -141,13 +141,13 @@ func (p *RedisProvider) Read(sid string) (session.RawStore, error) {
} }
} }
var kv map[interface{}]interface{} var kv map[any]any
kvs, err := p.c.Get(graceful.GetManager().HammerContext(), psid).Result() kvs, err := p.c.Get(graceful.GetManager().HammerContext(), psid).Result()
if err != nil { if err != nil {
return nil, err return nil, err
} }
if len(kvs) == 0 { if len(kvs) == 0 {
kv = make(map[interface{}]interface{}) kv = make(map[any]any)
} else { } else {
kv, err = session.DecodeGob([]byte(kvs)) kv, err = session.DecodeGob([]byte(kvs))
if err != nil { if err != nil {
@ -197,9 +197,9 @@ func (p *RedisProvider) Regenerate(oldsid, sid string) (_ session.RawStore, err
return nil, err return nil, err
} }
var kv map[interface{}]interface{} var kv map[any]any
if len(kvs) == 0 { if len(kvs) == 0 {
kv = make(map[interface{}]interface{}) kv = make(map[any]any)
} else { } else {
kv, err = session.DecodeGob([]byte(kvs)) kv, err = session.DecodeGob([]byte(kvs))
if err != nil { if err != nil {

View file

@ -11,9 +11,9 @@ import (
// Store represents a session store // Store represents a session store
type Store interface { type Store interface {
Get(interface{}) interface{} Get(any) any
Set(interface{}, interface{}) error Set(any, any) error
Delete(interface{}) error Delete(any) error
} }
// RegenerateSession regenerates the underlying session and returns the new store // RegenerateSession regenerates the underlying session and returns the new store
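One property worth noting about interface-typed method sets like `Store` above: because `any` is an alias rather than a distinct type, a concrete type whose methods are still spelled with `interface{}` continues to satisfy an interface declared with `any`, which is what makes a file-by-file rename such as this one safe to apply incrementally. A small illustrative sketch (the `Store` copy below is a stand-in, not the real session interface):

```go
package main

import "fmt"

// Store is an illustrative copy of the session interface above.
type Store interface {
	Get(any) any
	Set(any, any) error
	Delete(any) error
}

// memStore keeps its method signatures spelled with interface{}; since
// any is an alias for interface{}, it still satisfies Store.
type memStore map[interface{}]interface{}

func (m memStore) Get(k interface{}) interface{} { return m[k] }
func (m memStore) Set(k, v interface{}) error    { m[k] = v; return nil }
func (m memStore) Delete(k interface{}) error    { delete(m, k); return nil }

func main() {
	var s Store = memStore{}
	_ = s.Set("uid", 42)
	fmt.Println(s.Get("uid")) // 42
}
```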

View file

@ -62,7 +62,7 @@ func (o *VirtualSessionProvider) Read(sid string) (session.RawStore, error) {
if o.provider.Exist(sid) { if o.provider.Exist(sid) {
return o.provider.Read(sid) return o.provider.Read(sid)
} }
kv := make(map[interface{}]interface{}) kv := make(map[any]any)
kv["_old_uid"] = "0" kv["_old_uid"] = "0"
return NewVirtualStore(o, sid, kv), nil return NewVirtualStore(o, sid, kv), nil
} }
@ -107,12 +107,12 @@ type VirtualStore struct {
p *VirtualSessionProvider p *VirtualSessionProvider
sid string sid string
lock sync.RWMutex lock sync.RWMutex
data map[interface{}]interface{} data map[any]any
released bool released bool
} }
// NewVirtualStore creates and returns a virtual session store. // NewVirtualStore creates and returns a virtual session store.
func NewVirtualStore(p *VirtualSessionProvider, sid string, kv map[interface{}]interface{}) *VirtualStore { func NewVirtualStore(p *VirtualSessionProvider, sid string, kv map[any]any) *VirtualStore {
return &VirtualStore{ return &VirtualStore{
p: p, p: p,
sid: sid, sid: sid,
@ -121,7 +121,7 @@ func NewVirtualStore(p *VirtualSessionProvider, sid string, kv map[interface{}]i
} }
// Set sets value to given key in session. // Set sets value to given key in session.
func (s *VirtualStore) Set(key, val interface{}) error { func (s *VirtualStore) Set(key, val any) error {
s.lock.Lock() s.lock.Lock()
defer s.lock.Unlock() defer s.lock.Unlock()
@ -130,7 +130,7 @@ func (s *VirtualStore) Set(key, val interface{}) error {
} }
// Get gets value by given key in session. // Get gets value by given key in session.
func (s *VirtualStore) Get(key interface{}) interface{} { func (s *VirtualStore) Get(key any) any {
s.lock.RLock() s.lock.RLock()
defer s.lock.RUnlock() defer s.lock.RUnlock()
@ -138,7 +138,7 @@ func (s *VirtualStore) Get(key interface{}) interface{} {
} }
// Delete delete a key from session. // Delete delete a key from session.
func (s *VirtualStore) Delete(key interface{}) error { func (s *VirtualStore) Delete(key any) error {
s.lock.Lock() s.lock.Lock()
defer s.lock.Unlock() defer s.lock.Unlock()
@ -192,6 +192,6 @@ func (s *VirtualStore) Flush() error {
s.lock.Lock() s.lock.Lock()
defer s.lock.Unlock() defer s.lock.Unlock()
s.data = make(map[interface{}]interface{}) s.data = make(map[any]any)
return nil return nil
} }

View file

@ -6,11 +6,11 @@ package setting
import "reflect" import "reflect"
// GetCronSettings maps the cron subsection to the provided config // GetCronSettings maps the cron subsection to the provided config
func GetCronSettings(name string, config interface{}) (interface{}, error) { func GetCronSettings(name string, config any) (any, error) {
return getCronSettings(CfgProvider, name, config) return getCronSettings(CfgProvider, name, config)
} }
func getCronSettings(rootCfg ConfigProvider, name string, config interface{}) (interface{}, error) { func getCronSettings(rootCfg ConfigProvider, name string, config any) (any, error) {
if err := rootCfg.Section("cron." + name).MapTo(config); err != nil { if err := rootCfg.Section("cron." + name).MapTo(config); err != nil {
return config, err return config, err
} }

View file

@ -30,7 +30,7 @@ func initLoggersByConfig(t *testing.T, config string) (*log.LoggerManager, func(
return manager, manager.Close return manager, manager.Close
} }
func toJSON(v interface{}) string { func toJSON(v any) string {
b, _ := json.MarshalIndent(v, "", "\t") b, _ := json.MarshalIndent(v, "", "\t")
return string(b) return string(b)
} }

View file

@ -173,7 +173,7 @@ func (m minioFileInfo) Mode() os.FileMode {
return os.ModePerm return os.ModePerm
} }
func (m minioFileInfo) Sys() interface{} { func (m minioFileInfo) Sys() any {
return nil return nil
} }

View file

@ -20,7 +20,7 @@ var ErrURLNotSupported = errors.New("url method not supported")
// ErrInvalidConfiguration is called when there is invalid configuration for a storage // ErrInvalidConfiguration is called when there is invalid configuration for a storage
type ErrInvalidConfiguration struct { type ErrInvalidConfiguration struct {
cfg interface{} cfg any
err error err error
} }

View file

@ -140,10 +140,10 @@ const (
// IssueFormField represents a form field // IssueFormField represents a form field
// swagger:model // swagger:model
type IssueFormField struct { type IssueFormField struct {
Type IssueFormFieldType `json:"type" yaml:"type"` Type IssueFormFieldType `json:"type" yaml:"type"`
ID string `json:"id" yaml:"id"` ID string `json:"id" yaml:"id"`
Attributes map[string]interface{} `json:"attributes" yaml:"attributes"` Attributes map[string]any `json:"attributes" yaml:"attributes"`
Validations map[string]interface{} `json:"validations" yaml:"validations"` Validations map[string]any `json:"validations" yaml:"validations"`
} }
// IssueTemplate represents an issue template for a repository // IssueTemplate represents an issue template for a repository

View file

@ -166,7 +166,7 @@ type FilesResponse struct {
// FileDeleteResponse contains information about a repo's file that was deleted // FileDeleteResponse contains information about a repo's file that was deleted
type FileDeleteResponse struct { type FileDeleteResponse struct {
Content interface{} `json:"content"` // to be set to nil Content any `json:"content"` // to be set to nil
Commit *FileCommitResponse `json:"commit"` Commit *FileCommitResponse `json:"commit"`
Verification *PayloadCommitVerification `json:"verification"` Verification *PayloadCommitVerification `json:"verification"`
} }

View file

@ -9,10 +9,10 @@ import (
// WatchInfo represents an API watch status of one repository // WatchInfo represents an API watch status of one repository
type WatchInfo struct { type WatchInfo struct {
Subscribed bool `json:"subscribed"` Subscribed bool `json:"subscribed"`
Ignored bool `json:"ignored"` Ignored bool `json:"ignored"`
Reason interface{} `json:"reason"` Reason any `json:"reason"`
CreatedAt time.Time `json:"created_at"` CreatedAt time.Time `json:"created_at"`
URL string `json:"url"` URL string `json:"url"`
RepositoryURL string `json:"repository_url"` RepositoryURL string `json:"repository_url"`
} }

Some files were not shown because too many files have changed in this diff.
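Every hunk in this diff is the same mechanical substitution, and it is behaviour-preserving because `any` has been a predeclared alias for `interface{}` since Go 1.18 (`type any = interface{}`): the two spellings denote the identical type, so assignments, assertions, and type switches are unchanged. A closing sketch, independent of the patch itself:

```go
package main

import "fmt"

// describe works the same whether its parameter is written as any or
// interface{}; the alias means there is only one type involved.
func describe(v any) string {
	switch v := v.(type) {
	case string:
		return "string: " + v
	case int:
		return fmt.Sprintf("int: %d", v)
	default:
		return fmt.Sprintf("other: %T", v)
	}
}

func main() {
	var a any = "hello"
	var b interface{} = a // assignable in both directions: same type
	fmt.Println(describe(a), describe(b), describe(3.14))
}
```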