
Run gopls modernize on codebase (#34751)

Recent modernize fixes:
https://github.com/golang/tools/commits/master/gopls/internal/analysis/modernize
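The commit message does not record the exact invocation. As an assumption (not stated in the commit), the modernize analyzer is usually applied in bulk with its standalone command and the analysis framework's -fix flag, along the lines of:

	go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...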
silverwind authored on 2025-06-18 03:48:09 +02:00, committed by GitHub
parent 71e4740946
commit 1f35435b81
190 changed files with 369 additions and 696 deletions

View File

@@ -14,11 +14,11 @@ type CustomURLMapping struct {
// CustomURLSettings describes the urls values and availability to use when customizing OAuth2 provider URLs
type CustomURLSettings struct {
- AuthURL Attribute `json:",omitempty"`
- TokenURL Attribute `json:",omitempty"`
- ProfileURL Attribute `json:",omitempty"`
- EmailURL Attribute `json:",omitempty"`
- Tenant Attribute `json:",omitempty"`
+ AuthURL Attribute
+ TokenURL Attribute
+ ProfileURL Attribute
+ EmailURL Attribute
+ Tenant Attribute
}
// Attribute describes the availability, and required status for a custom url configuration

View File

@@ -9,6 +9,7 @@ import (
"fmt"
"net/http"
"net/url"
+ "slices"
"strconv"
"strings"
@@ -364,11 +365,5 @@ func (ctx *APIContext) IsUserRepoAdmin() bool {
// IsUserRepoWriter returns true if current user has "write" privilege in current repo
func (ctx *APIContext) IsUserRepoWriter(unitTypes []unit.Type) bool {
- for _, unitType := range unitTypes {
- if ctx.Repo.CanWrite(unitType) {
- return true
- }
- }
- return false
+ return slices.ContainsFunc(unitTypes, ctx.Repo.CanWrite)
}
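Illustrative sketch, not part of this commit: the slicescontains fix collapses a manual search loop into slices.ContainsFunc, which reports whether any element satisfies the predicate (the data below is made up).

	package main

	import (
		"fmt"
		"slices"
		"strings"
	)

	func main() {
		words := []string{"alpha", "Beta", "gamma"}
		// Equivalent to looping over words and returning true on the first match.
		hasUpper := slices.ContainsFunc(words, func(s string) bool {
			return s != strings.ToLower(s)
		})
		fmt.Println(hasUpper) // true
	}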

View File

@@ -5,6 +5,7 @@ package context
import (
"net/http"
+ "slices"
auth_model "code.gitea.io/gitea/models/auth"
repo_model "code.gitea.io/gitea/models/repo"
@@ -34,10 +35,8 @@ func CanWriteToBranch() func(ctx *Context) {
// RequireUnitWriter returns a middleware for requiring repository write to one of the unit permission
func RequireUnitWriter(unitTypes ...unit.Type) func(ctx *Context) {
return func(ctx *Context) {
- for _, unitType := range unitTypes {
- if ctx.Repo.CanWrite(unitType) {
- return
- }
+ if slices.ContainsFunc(unitTypes, ctx.Repo.CanWrite) {
+ return
+ }
ctx.NotFound(nil)
}

View File

@@ -39,7 +39,7 @@ func Verify(buf []byte, fileName, allowedTypesStr string) error {
allowedTypesStr = strings.ReplaceAll(allowedTypesStr, "|", ",") // compat for old config format
allowedTypes := []string{}
- for _, entry := range strings.Split(allowedTypesStr, ",") {
+ for entry := range strings.SplitSeq(allowedTypesStr, ",") {
entry = strings.ToLower(strings.TrimSpace(entry))
if entry != "" {
allowedTypes = append(allowedTypes, entry)
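For context, a minimal sketch of the stringsseq rewrite with a made-up input: strings.SplitSeq (Go 1.24) yields the pieces as an iterator instead of allocating an intermediate slice, and the loop ranges over it directly. bytes.SplitSeq, used later in this commit, is the []byte counterpart.

	package main

	import (
		"fmt"
		"strings"
	)

	func main() {
		allowed := []string{}
		for entry := range strings.SplitSeq("png, JPEG, ,gif", ",") {
			entry = strings.ToLower(strings.TrimSpace(entry))
			if entry != "" {
				allowed = append(allowed, entry)
			}
		}
		fmt.Println(allowed) // [png jpeg gif]
	}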

View File

@@ -147,7 +147,7 @@ func TestRepoActions(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
_ = db.TruncateBeans(db.DefaultContext, &activities_model.Action{})
- for i := 0; i < 3; i++ {
+ for i := range 3 {
_ = db.Insert(db.DefaultContext, &activities_model.Action{
UserID: 2 + int64(i),
ActUserID: 2,
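Aside, a minimal sketch of the rangeint rewrite: since Go 1.22, ranging over an integer iterates 0 through n-1, replacing the three-clause counting loop.

	package main

	import "fmt"

	func main() {
		// Same iterations as: for i := 0; i < 3; i++ { ... }
		for i := range 3 {
			fmt.Println("iteration", i)
		}
	}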

View File

@@ -134,7 +134,7 @@ func createCsvDiffSingle(reader *csv.Reader, celltype TableDiffCellType) ([]*Tab
return nil, err
}
cells := make([]*TableDiffCell, len(row))
- for j := 0; j < len(row); j++ {
+ for j := range row {
if celltype == TableDiffCellDel {
cells[j] = &TableDiffCell{LeftCell: row[j], Type: celltype}
} else {
@@ -365,11 +365,11 @@ func getColumnMapping(baseCSVReader, headCSVReader *csvReader) ([]int, []int) {
}
// Loops through the baseRow and see if there is a match in the head row
- for i := 0; i < len(baseRow); i++ {
+ for i := range baseRow {
base2HeadColMap[i] = unmappedColumn
baseCell, err := getCell(baseRow, i)
if err == nil {
- for j := 0; j < len(headRow); j++ {
+ for j := range headRow {
if head2BaseColMap[j] == -1 {
headCell, err := getCell(headRow, j)
if err == nil && baseCell == headCell {
@@ -390,7 +390,7 @@ func getColumnMapping(baseCSVReader, headCSVReader *csvReader) ([]int, []int) {
// tryMapColumnsByContent tries to map missing columns by the content of the first lines.
func tryMapColumnsByContent(baseCSVReader *csvReader, base2HeadColMap []int, headCSVReader *csvReader, head2BaseColMap []int) {
- for i := 0; i < len(base2HeadColMap); i++ {
+ for i := range base2HeadColMap {
headStart := 0
for base2HeadColMap[i] == unmappedColumn && headStart < len(head2BaseColMap) {
if head2BaseColMap[headStart] == unmappedColumn {
@@ -424,7 +424,7 @@ func getCell(row []string, column int) (string, error) {
// countUnmappedColumns returns the count of unmapped columns.
func countUnmappedColumns(mapping []int) int {
count := 0
- for i := 0; i < len(mapping); i++ {
+ for i := range mapping {
if mapping[i] == unmappedColumn {
count++
}
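Illustrative note, not from the commit: when only the index is needed, for i := range s iterates the indices of a slice, and elements are still read as s[i].

	package main

	import "fmt"

	func main() {
		row := []string{"a", "b", "c"}
		for j := range row { // j takes 0..len(row)-1
			fmt.Println(j, row[j])
		}
	}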

View File

@@ -540,10 +540,7 @@ func ParsePatch(ctx context.Context, maxLines, maxLineCharacters, maxFiles int,
// OK let's set a reasonable buffer size.
// This should be at least the size of maxLineCharacters or 4096 whichever is larger.
- readerSize := maxLineCharacters
- if readerSize < 4096 {
- readerSize = 4096
- }
+ readerSize := max(maxLineCharacters, 4096)
input := bufio.NewReaderSize(reader, readerSize)
line, err := input.ReadString('\n')
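A sketch of the minmax rewrite with made-up numbers: the built-in max (Go 1.21) replaces the clamp-style if statement.

	package main

	import "fmt"

	func main() {
		maxLineCharacters := 1000
		// Was: readerSize := maxLineCharacters; if readerSize < 4096 { readerSize = 4096 }
		readerSize := max(maxLineCharacters, 4096)
		fmt.Println(readerSize) // 4096
	}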

View File

@@ -416,7 +416,7 @@ index 0000000..6bb8f39
`
diffBuilder.WriteString(diff)
- for i := 0; i < 35; i++ {
+ for i := range 35 {
diffBuilder.WriteString("+line" + strconv.Itoa(i) + "\n")
}
diff = diffBuilder.String()
@@ -453,11 +453,11 @@ index 0000000..6bb8f39
diffBuilder.Reset()
diffBuilder.WriteString(diff)
- for i := 0; i < 33; i++ {
+ for i := range 33 {
diffBuilder.WriteString("+line" + strconv.Itoa(i) + "\n")
}
diffBuilder.WriteString("+line33")
- for i := 0; i < 512; i++ {
+ for range 512 {
diffBuilder.WriteString("0123456789ABCDEF")
}
diffBuilder.WriteByte('\n')

View File

@@ -203,7 +203,6 @@ index 0000000..68972a9
}
for _, testcase := range tests {
- testcase := testcase
t.Run(testcase.name, func(t *testing.T) {
diff, err := ParsePatch(db.DefaultContext, setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffLineCharacters, setting.Git.MaxGitDiffFiles, strings.NewReader(testcase.gitdiff), "")
assert.NoError(t, err)
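Background sketch, not part of this diff: since Go 1.22 every loop iteration binds a fresh copy of the loop variable, so defensive re-declarations like testcase := testcase are redundant and the forvar fix deletes them.

	package main

	import "fmt"

	func main() {
		funcs := make([]func(), 0, 3)
		for _, s := range []string{"a", "b", "c"} {
			// No "s := s" needed in Go 1.22+: each closure captures its own s.
			funcs = append(funcs, func() { fmt.Println(s) })
		}
		for _, f := range funcs {
			f() // prints a, b, c
		}
	}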

View File

@@ -74,10 +74,7 @@ func GetListLockHandler(ctx *context.Context) {
}
ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType)
- cursor := ctx.FormInt("cursor")
- if cursor < 0 {
- cursor = 0
- }
+ cursor := max(ctx.FormInt("cursor"), 0)
limit := ctx.FormInt("limit")
if limit > setting.LFS.LocksPagingNum && setting.LFS.LocksPagingNum > 0 {
limit = setting.LFS.LocksPagingNum
@@ -239,10 +236,7 @@ func VerifyLockHandler(ctx *context.Context) {
ctx.Resp.Header().Set("Content-Type", lfs_module.MediaType)
- cursor := ctx.FormInt("cursor")
- if cursor < 0 {
- cursor = 0
- }
+ cursor := max(ctx.FormInt("cursor"), 0)
limit := ctx.FormInt("limit")
if limit > setting.LFS.LocksPagingNum && setting.LFS.LocksPagingNum > 0 {
limit = setting.LFS.LocksPagingNum

View File

@@ -11,6 +11,7 @@ import (
"errors"
"fmt"
"io"
+ "maps"
"net/http"
"net/url"
"path"
@@ -480,9 +481,7 @@ func buildObjectResponse(rc *requestContext, pointer lfs_module.Pointer, downloa
rep.Actions["upload"] = &lfs_module.Link{Href: rc.UploadLink(pointer), Header: header}
verifyHeader := make(map[string]string)
- for key, value := range header {
- verifyHeader[key] = value
- }
+ maps.Copy(verifyHeader, header)
// This is only needed to workaround https://github.com/git-lfs/git-lfs/issues/3662
verifyHeader["Accept"] = lfs_module.AcceptHeader
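Illustrative sketch of the mapsloop rewrite with placeholder headers: maps.Copy inserts every key/value pair of the source map into the destination, which is exactly what the hand-written loop did.

	package main

	import (
		"fmt"
		"maps"
	)

	func main() {
		header := map[string]string{"Authorization": "Bearer token", "Accept": "application/json"}
		verifyHeader := make(map[string]string, len(header))
		maps.Copy(verifyHeader, header) // was: for k, v := range header { verifyHeader[k] = v }
		fmt.Println(len(verifyHeader)) // 2
	}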

View File

@@ -108,9 +108,9 @@ func extractMailHeaderAndContent(t *testing.T, mail string) (map[string]string,
}
content := strings.TrimSpace("boundary=" + parts[1])
- hParts := strings.Split(parts[0], "\n")
+ hParts := strings.SplitSeq(parts[0], "\n")
- for _, hPart := range hParts {
+ for hPart := range hParts {
parts := strings.SplitN(hPart, ":", 2)
hk := strings.TrimSpace(parts[0])
if hk != "" {

View File

@@ -155,10 +155,7 @@ func (c *CodeCommitDownloader) GetPullRequests(ctx context.Context, page, perPag
}
startIndex := (page - 1) * perPage
- endIndex := page * perPage
- if endIndex > len(allPullRequestIDs) {
- endIndex = len(allPullRequestIDs)
- }
+ endIndex := min(page*perPage, len(allPullRequestIDs))
batch := allPullRequestIDs[startIndex:endIndex]
prs := make([]*base.PullRequest, 0, len(batch))

View File

@@ -89,8 +89,8 @@ func NewGithubDownloaderV3(_ context.Context, baseURL, userName, password, token
}
if token != "" {
- tokens := strings.Split(token, ",")
- for _, token := range tokens {
+ tokens := strings.SplitSeq(token, ",")
+ for token := range tokens {
token = strings.TrimSpace(token)
ts := oauth2.StaticTokenSource(
&oauth2.Token{AccessToken: token},

View File

@@ -85,7 +85,7 @@ func GrantAdditionalScopes(grantScopes string) auth.AccessTokenScope {
}
var accessScopes []string // the scopes for access control, but not for general information
- for _, scope := range strings.Split(grantScopes, " ") {
+ for scope := range strings.SplitSeq(grantScopes, " ") {
if scope != "" && !slices.Contains(generalScopesSupported, scope) {
accessScopes = append(accessScopes, scope)
}

View File

@@ -204,7 +204,7 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) {
// Create repos.
repoIDs := make([]int64, 0)
- for i := 0; i < 3; i++ {
+ for i := range 3 {
r, err := repo_service.CreateRepositoryDirectly(db.DefaultContext, user, org.AsUser(),
repo_service.CreateRepoOptions{Name: fmt.Sprintf("repo-%d", i)}, true)
assert.NoError(t, err, "CreateRepository %d", i)

View File

@@ -34,12 +34,7 @@ func parseEVR(evr string) (epoch, version, release string) {
func compareSegments(a, b []string) int {
lenA, lenB := len(a), len(b)
- var l int
- if lenA > lenB {
- l = lenB
- } else {
- l = lenA
- }
+ l := min(lenA, lenB)
for i := 0; i < l; i++ {
if r := compare(a[i], b[i]); r != 0 {
return r

View File

@@ -8,6 +8,7 @@ import (
"context"
"errors"
"fmt"
+ "maps"
"os"
"path/filepath"
"regexp"
@@ -95,9 +96,7 @@ func getMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issue
vars["HeadRepoOwnerName"] = pr.HeadRepo.OwnerName
vars["HeadRepoName"] = pr.HeadRepo.Name
}
- for extraKey, extraValue := range extraVars {
- vars[extraKey] = extraValue
- }
+ maps.Copy(vars, extraVars)
refs, err := pr.ResolveCrossReferences(ctx)
if err == nil {
closeIssueIndexes := make([]string, 0, len(refs))

View File

@@ -29,7 +29,7 @@ func TestCheckUnadoptedRepositories_Add(t *testing.T) {
}
total := 30
- for i := 0; i < total; i++ {
+ for range total {
unadopted.add("something")
}

View File

@@ -24,7 +24,7 @@ import (
)
func getCacheKey(repoID int64, brancheName string) string {
- hashBytes := sha256.Sum256([]byte(fmt.Sprintf("%d:%s", repoID, brancheName)))
+ hashBytes := sha256.Sum256(fmt.Appendf(nil, "%d:%s", repoID, brancheName))
return fmt.Sprintf("commit_status:%x", hashBytes)
}
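Sketch of the fmtappendf rewrite with hypothetical values: fmt.Appendf formats straight into a byte slice, so the fmt.Sprintf-then-[]byte conversion is avoided.

	package main

	import (
		"crypto/sha256"
		"fmt"
	)

	func main() {
		repoID, branch := int64(1), "main"
		// Was: sha256.Sum256([]byte(fmt.Sprintf("%d:%s", repoID, branch)))
		hash := sha256.Sum256(fmt.Appendf(nil, "%d:%s", repoID, branch))
		fmt.Printf("commit_status:%x\n", hash)
	}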

View File

@@ -100,8 +100,8 @@ func prepareRepoCommit(ctx context.Context, repo *repo_model.Repository, tmpDir
// .gitignore
if len(opts.Gitignores) > 0 {
var buf bytes.Buffer
- names := strings.Split(opts.Gitignores, ",")
- for _, name := range names {
+ names := strings.SplitSeq(opts.Gitignores, ",")
+ for name := range names {
data, err = options.Gitignore(name)
if err != nil {
return fmt.Errorf("GetRepoInitFile[%s]: %w", name, err)

View File

@@ -139,7 +139,7 @@ func CleanUploadFileName(name string) string {
// Rebase the filename
name = util.PathJoinRel(name)
// Git disallows any filenames to have a .git directory in them.
- for _, part := range strings.Split(name, "/") {
+ for part := range strings.SplitSeq(name, "/") {
if strings.ToLower(part) == ".git" {
return ""
}

View File

@@ -128,7 +128,7 @@ func (t *TemporaryUploadRepository) LsFiles(ctx context.Context, filenames ...st
}
fileList := make([]string, 0, len(filenames))
- for _, line := range bytes.Split(stdOut.Bytes(), []byte{'\000'}) {
+ for line := range bytes.SplitSeq(stdOut.Bytes(), []byte{'\000'}) {
fileList = append(fileList, string(line))
}

View File

@@ -94,11 +94,7 @@ func GetTreeBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git
if len(entries) > perPage {
tree.Truncated = true
}
- if rangeStart+perPage < len(entries) {
- rangeEnd = rangeStart + perPage
- } else {
- rangeEnd = len(entries)
- }
+ rangeEnd = min(rangeStart+perPage, len(entries))
tree.Entries = make([]api.GitEntry, rangeEnd-rangeStart)
for e := rangeStart; e < rangeEnd; e++ {
i := e - rangeStart

View File

@@ -8,6 +8,7 @@ import (
"fmt"
"io"
"path"
+ "slices"
"strings"
"time"
@@ -203,13 +204,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
}
// Find the file we want to delete in the index
- inFilelist := false
- for _, indexFile := range filesInIndex {
- if indexFile == file.TreePath {
- inFilelist = true
- break
- }
- }
+ inFilelist := slices.Contains(filesInIndex, file.TreePath)
if !inFilelist {
return nil, ErrRepoFileDoesNotExist{
Path: file.TreePath,
@@ -467,11 +462,9 @@ func CreateUpdateRenameFile(ctx context.Context, t *TemporaryUploadRepository, f
}
// If is a new file (not updating) then the given path shouldn't exist
if file.Operation == "create" {
- for _, indexFile := range filesInIndex {
- if indexFile == file.TreePath {
- return ErrRepoFileAlreadyExists{
- Path: file.TreePath,
- }
+ if slices.Contains(filesInIndex, file.TreePath) {
+ return ErrRepoFileAlreadyExists{
+ Path: file.TreePath,
+ }
}
}
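Minimal sketch of the slicescontains rewrite for plain equality checks (made-up data): slices.Contains reports whether a value occurs in the slice, replacing the found-flag loop.

	package main

	import (
		"fmt"
		"slices"
	)

	func main() {
		filesInIndex := []string{"README.md", "docs/index.md"}
		// Was: a loop setting inFilelist = true and breaking on the first match.
		inFilelist := slices.Contains(filesInIndex, "docs/index.md")
		fmt.Println(inFilelist) // true
	}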

View File

@@ -232,8 +232,8 @@ func newRefsFromRefNames(refNames []byte) []git.Reference {
continue
}
refName := string(refNameBytes)
- if strings.HasPrefix(refName, "tag: ") {
- refName = strings.TrimPrefix(refName, "tag: ")
+ if after, ok := strings.CutPrefix(refName, "tag: "); ok {
+ refName = after
} else {
refName = strings.TrimPrefix(refName, "HEAD -> ")
}
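Sketch of the stringscutprefix rewrite: strings.CutPrefix (Go 1.20) returns the remainder and whether the prefix was present, merging the HasPrefix/TrimPrefix pair into one call.

	package main

	import (
		"fmt"
		"strings"
	)

	func main() {
		refName := "tag: v1.24.0"
		if after, ok := strings.CutPrefix(refName, "tag: "); ok {
			refName = after
		}
		fmt.Println(refName) // v1.24.0
	}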

View File

@@ -6,6 +6,7 @@ package gitgraph
import (
"bytes"
"fmt"
+ "slices"
"strings"
"testing"
@@ -117,13 +118,7 @@ func TestReleaseUnusedColors(t *testing.T) {
if parser.firstAvailable == -1 {
// All in use
for _, color := range parser.availableColors {
- found := false
- for _, oldColor := range parser.oldColors {
- if oldColor == color {
- found = true
- break
- }
- }
+ found := slices.Contains(parser.oldColors, color)
if !found {
t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should be available but is not",
testcase.availableColors,
@@ -141,13 +136,7 @@ func TestReleaseUnusedColors(t *testing.T) {
// Some in use
for i := parser.firstInUse; i != parser.firstAvailable; i = (i + 1) % len(parser.availableColors) {
color := parser.availableColors[i]
- found := false
- for _, oldColor := range parser.oldColors {
- if oldColor == color {
- found = true
- break
- }
- }
+ found := slices.Contains(parser.oldColors, color)
if !found {
t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should be available but is not",
testcase.availableColors,
@@ -163,13 +152,7 @@ func TestReleaseUnusedColors(t *testing.T) {
}
for i := parser.firstAvailable; i != parser.firstInUse; i = (i + 1) % len(parser.availableColors) {
color := parser.availableColors[i]
- found := false
- for _, oldColor := range parser.oldColors {
- if oldColor == color {
- found = true
- break
- }
- }
+ found := slices.Contains(parser.oldColors, color)
if found {
t.Errorf("In testcase:\n%d\t%d\t%d %d =>\n%d\t%d\t%d %d: %d should not be available but is",
testcase.availableColors,

View File

@@ -70,11 +70,11 @@ func parseThemeMetaInfoToMap(cssContent string) map[string]string {
m := map[string]string{}
for _, item := range matchedItems {
v := item[3]
- if strings.HasPrefix(v, `"`) {
- v = strings.TrimSuffix(strings.TrimPrefix(v, `"`), `"`)
+ if after, ok := strings.CutPrefix(v, `"`); ok {
+ v = strings.TrimSuffix(after, `"`)
v = strings.ReplaceAll(v, `\"`, `"`)
- } else if strings.HasPrefix(v, `'`) {
- v = strings.TrimSuffix(strings.TrimPrefix(v, `'`), `'`)
+ } else if after, ok := strings.CutPrefix(v, `'`); ok {
+ v = strings.TrimSuffix(after, `'`)
v = strings.ReplaceAll(v, `\'`, `'`)
}
m[item[2]] = v

View File

@@ -116,9 +116,9 @@ func TestGitPathToWebPath(t *testing.T) {
func TestUserWebGitPathConsistency(t *testing.T) {
maxLen := 20
b := make([]byte, maxLen)
- for i := 0; i < 1000; i++ {
+ for range 1000 {
l := rand.Intn(maxLen)
- for j := 0; j < l; j++ {
+ for j := range l {
r := rand.Intn(0x80-0x20) + 0x20
b[j] = byte(r)
}