Merge branch 'main' into tippyfix

Commit 0efb41401f by silverwind, 2024-04-29 20:58:12 +02:00, committed by GitHub.
No known key found for this signature in database. GPG key ID: B5690EEEBB952194
80 changed files with 358 additions and 1321 deletions

View File

@ -4,6 +4,7 @@ reportUnusedDisableDirectives: true
ignorePatterns:
- /web_src/js/vendor
- /web_src/fomantic
- /public/assets/js
parserOptions:
sourceType: module

View File

@ -1,25 +0,0 @@
name: disk-clean
on:
workflow_call:
jobs:
triage:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Free Disk Space (Ubuntu)
uses: jlumbroso/free-disk-space@main
with:
# this might remove tools that are actually needed,
# if set to "true" but frees about 6 GB
tool-cache: false
# all of these default to true, but feel free to set to
# "false" if necessary for your workflow
android: true
dotnet: true
haskell: true
large-packages: false
docker-images: false
swap-storage: true

View File

@ -9,8 +9,6 @@ concurrency:
cancel-in-progress: true
jobs:
disk-clean:
uses: ./.github/workflows/disk-clean.yml
nightly-binary:
runs-on: nscloud
steps:

View File

@ -778,7 +778,7 @@ generate-backend: $(TAGS_PREREQ) generate-go
.PHONY: generate-go
generate-go: $(TAGS_PREREQ)
@echo "Running go generate..."
@CC= GOOS= GOARCH= $(GO) generate -tags '$(TAGS)' ./...
@CC= GOOS= GOARCH= CGO_ENABLED=0 $(GO) generate -tags '$(TAGS)' ./...
.PHONY: security-check
security-check:

View File

@ -540,8 +540,8 @@
"licenseText": "Copyright (c) 2011 The Snappy-Go Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
"name": "github.com/google/go-github/v57/github",
"path": "github.com/google/go-github/v57/github/LICENSE",
"name": "github.com/google/go-github/v61/github",
"path": "github.com/google/go-github/v61/github/LICENSE",
"licenseText": "Copyright (c) 2013 The go-github AUTHORS. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{

View File

@ -17,7 +17,7 @@ import (
"strings"
"syscall"
"github.com/google/go-github/v57/github"
"github.com/google/go-github/v61/github"
"github.com/urfave/cli/v2"
"gopkg.in/yaml.v3"
)
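
The dependency bump above moves go-github from v57 to v61. Go modules encode the major version in the import path while the package name stays `github`, so callers only change the import line. A minimal sketch of a call through the upgraded client, assuming the stock go-github API (the repository queried here is illustrative):

```
package main

import (
	"context"
	"fmt"

	"github.com/google/go-github/v61/github" // was .../v57/github before this change
)

func main() {
	// NewClient(nil) uses http.DefaultClient; pass an *http.Client for auth or proxies.
	client := github.NewClient(nil)
	repo, _, err := client.Repositories.Get(context.Background(), "go-gitea", "gitea")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	fmt.Println(repo.GetFullName())
}
```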

View File

@ -214,9 +214,9 @@ The following configuration set `Content-Type: application/vnd.android.package-a
- `SITEMAP_PAGING_NUM`: **20**: Number of items that are displayed in a single subsitemap.
- `GRAPH_MAX_COMMIT_NUM`: **100**: Number of maximum commits shown in the commit graph.
- `CODE_COMMENT_LINES`: **4**: Number of line of codes shown for a code comment.
- `DEFAULT_THEME`: **gitea-auto**: Set the default theme for the Gitea installation, custom themes could be provided by "{CustomPath}/public/assets/css/theme-*.css".
- `DEFAULT_THEME`: **gitea-auto**: Set the default theme for the Gitea installation, custom themes could be provided by `{CustomPath}/public/assets/css/theme-*.css`.
- `SHOW_USER_EMAIL`: **true**: Whether the email of the user should be shown in the Explore Users page.
- `THEMES`: **_empty_**: All available themes by "{CustomPath}/public/assets/css/theme-*.css". Allow users select personalized themes.
- `THEMES`: **_empty_**: All available themes by `{CustomPath}/public/assets/css/theme-*.css`. Allow users select personalized themes.
- `MAX_DISPLAY_FILE_SIZE`: **8388608**: Max size of files to be displayed (default is 8MiB)
- `AMBIGUOUS_UNICODE_DETECTION`: **true**: Detect ambiguous unicode characters in file contents and show warnings on the UI
- `REACTIONS`: All available reactions users can choose on issues/prs and comments

View File

@ -212,9 +212,9 @@ menu:
- `SITEMAP_PAGING_NUM`: **20**: Number of items displayed in a single sub-sitemap.
- `GRAPH_MAX_COMMIT_NUM`: **100**: Maximum number of commits shown in the commit graph.
- `CODE_COMMENT_LINES`: **4**: Maximum number of lines of code shown in a code comment.
- `DEFAULT_THEME`: **gitea-auto**: The default theme set when Gitea is installed; custom themes can be provided via "{CustomPath}/public/assets/css/theme-*.css".
- `DEFAULT_THEME`: **gitea-auto**: The default theme set when Gitea is installed; custom themes can be provided via `{CustomPath}/public/assets/css/theme-*.css`.
- `SHOW_USER_EMAIL`: **true**: Whether users' email addresses should be shown on the `Explore Users` page.
- `THEMES`: **_empty_**: All available themes (provided by "{CustomPath}/public/assets/css/theme-*.css"). Allows users to select personalized themes.
- `THEMES`: **_empty_**: All available themes (provided by `{CustomPath}/public/assets/css/theme-*.css`). Allows users to select personalized themes.
- `MAX_DISPLAY_FILE_SIZE`: **8388608**: Maximum size of files that can be displayed (default is 8MiB).
- `REACTIONS`: All the reactions users can choose on issues, pull requests (PRs), and comments.
These values can be emoji aliases (e.g. :smile:) or Unicode emoji.

View File

@ -58,7 +58,7 @@ The repository now gets mirrored periodically to the remote repository. You can
To set up a mirror from Gitea to GitHub, you need to follow these steps:
1. Create a [GitHub personal access token](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) with the *public_repo* box checked. Also check the **workflow** checkbox in case your repo using act for continuous integration.
1. Create a [GitHub personal access token](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) with the *public_repo* box checked. Also check the **workflow** checkbox in case your repo uses GitHub Actions for continuous integration.
2. Create a repository with that name on GitHub. Unlike Gitea, GitHub does not support creating repositories by pushing to the remote. You can also use an existing remote repo if it has the same commit history as your Gitea repo.
3. In the settings of your Gitea repo, fill in the **Git Remote Repository URL**: `https://github.com/<your_github_group>/<your_github_project>.git`.
4. Fill in the **Authorization** fields with your GitHub username and the personal access token as **Password**.
@ -91,10 +91,10 @@ The repository pushes shortly thereafter. To force a push, select the **Synchron
### Mirror an existing ssh repository
Currently gitea supports no ssh push mirrors. You can work around this by adding a `post-receive` hook to your gitea repository that pushes manually.
Currently Gitea supports no ssh push mirrors. You can work around this by adding a `post-receive` hook to your Gitea repository that pushes manually.
1. Make sure the user running gitea has access to the git repo you are trying to mirror to from shell.
2. On the Webinterface at the repository settings > git hooks add a post-receive hook for the mirror. I.e.
1. Make sure the user running Gitea has access to the git repo you are trying to mirror to from shell.
2. On the web interface at the repository settings > git hooks add a post-receive hook for the mirror. I.e.
```
#!/usr/bin/env bash

go.mod
View File

@ -16,6 +16,7 @@ require (
gitea.com/lunny/levelqueue v0.4.2-0.20230414023320-3c0159fe0fe4
github.com/42wim/sshsig v0.0.0-20211121163825-841cf5bbc121
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
github.com/ProtonMail/go-crypto v1.0.0
github.com/PuerkitoBio/goquery v1.9.1
github.com/alecthomas/chroma/v2 v2.13.0
github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb
@ -53,7 +54,7 @@ require (
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f
github.com/gogs/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85
github.com/golang-jwt/jwt/v5 v5.2.1
github.com/google/go-github/v57 v57.0.0
github.com/google/go-github/v61 v61.0.0
github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7
github.com/google/uuid v1.6.0
github.com/gorilla/feeds v1.1.2
@ -135,7 +136,6 @@ require (
github.com/Masterminds/semver/v3 v3.2.1 // indirect
github.com/Masterminds/sprig/v3 v3.2.3 // indirect
github.com/Microsoft/go-winio v0.6.1 // indirect
github.com/ProtonMail/go-crypto v1.0.0 // indirect
github.com/RoaringBitmap/roaring v1.9.0 // indirect
github.com/andybalholm/brotli v1.1.0 // indirect
github.com/andybalholm/cascadia v1.3.2 // indirect

go.sum
View File

@ -394,8 +394,8 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-github/v57 v57.0.0 h1:L+Y3UPTY8ALM8x+TV0lg+IEBI+upibemtBD8Q9u7zHs=
github.com/google/go-github/v57 v57.0.0/go.mod h1:s0omdnye0hvK/ecLvpsGfJMiRt85PimQh4oygmLIxHw=
github.com/google/go-github/v61 v61.0.0 h1:VwQCBwhyE9JclCI+22/7mLB1PuU9eowCXKY5pNlu1go=
github.com/google/go-github/v61 v61.0.0/go.mod h1:0WR+KmsWX75G2EbpyGsGmradjo3IiciuI4BmdVCobQY=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
github.com/google/go-tpm v0.9.0 h1:sQF6YqWMi+SCXpsmS3fd21oPy/vSddwZry4JnmltHVk=

View File

@ -34,7 +34,7 @@ func TestXRef_AddCrossReferences(t *testing.T) {
// Comment on PR to reopen issue #1
content = fmt.Sprintf("content2, reopens #%d", itarget.Index)
c := testCreateComment(t, 1, 2, pr.ID, content)
c := testCreateComment(t, 2, pr.ID, content)
ref = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: itarget.ID, RefIssueID: pr.ID, RefCommentID: c.ID})
assert.Equal(t, issues_model.CommentTypeCommentRef, ref.Type)
assert.Equal(t, pr.RepoID, ref.RefRepoID)
@ -104,18 +104,18 @@ func TestXRef_ResolveCrossReferences(t *testing.T) {
pr := testCreatePR(t, 1, 2, "titlepr", fmt.Sprintf("closes #%d", i1.Index))
rp := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i1.ID, RefIssueID: pr.Issue.ID, RefCommentID: 0})
c1 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i2.Index))
c1 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i2.Index))
r1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i2.ID, RefIssueID: pr.Issue.ID, RefCommentID: c1.ID})
// Must be ignored
c2 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("mentions #%d", i2.Index))
c2 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("mentions #%d", i2.Index))
unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i2.ID, RefIssueID: pr.Issue.ID, RefCommentID: c2.ID})
// Must be superseded by c4/r4
c3 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("reopens #%d", i3.Index))
c3 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("reopens #%d", i3.Index))
unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i3.ID, RefIssueID: pr.Issue.ID, RefCommentID: c3.ID})
c4 := testCreateComment(t, 1, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i3.Index))
c4 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i3.Index))
r4 := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i3.ID, RefIssueID: pr.Issue.ID, RefCommentID: c4.ID})
refs, err := pr.ResolveCrossReferences(db.DefaultContext)
@ -168,7 +168,7 @@ func testCreatePR(t *testing.T, repo, doer int64, title, content string) *issues
return pr
}
func testCreateComment(t *testing.T, repo, doer, issue int64, content string) *issues_model.Comment {
func testCreateComment(t *testing.T, doer, issue int64, content string) *issues_model.Comment {
d := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: doer})
i := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: issue})
c := &issues_model.Comment{Type: issues_model.CommentTypeComment, PosterID: doer, Poster: d, IssueID: issue, Issue: i, Content: content}

View File

@ -291,15 +291,15 @@ func TestAccessibleReposEnv_CountRepos(t *testing.T) {
func TestAccessibleReposEnv_RepoIDs(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
org := unittest.AssertExistsAndLoadBean(t, &organization.Organization{ID: 3})
testSuccess := func(userID, _, pageSize int64, expectedRepoIDs []int64) {
testSuccess := func(userID int64, expectedRepoIDs []int64) {
env, err := organization.AccessibleReposEnv(db.DefaultContext, org, userID)
assert.NoError(t, err)
repoIDs, err := env.RepoIDs(1, 100)
assert.NoError(t, err)
assert.Equal(t, expectedRepoIDs, repoIDs)
}
testSuccess(2, 1, 100, []int64{3, 5, 32})
testSuccess(4, 0, 100, []int64{3, 32})
testSuccess(2, []int64{3, 5, 32})
testSuccess(4, []int64{3, 32})
}
func TestAccessibleReposEnv_Repos(t *testing.T) {

View File

@ -5,8 +5,8 @@ package user_test
import (
"context"
"crypto/rand"
"fmt"
"math/rand"
"strings"
"testing"
"time"

View File

@ -208,14 +208,14 @@ func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent web
webhook_module.HookEventIssueAssign,
webhook_module.HookEventIssueLabel,
webhook_module.HookEventIssueMilestone:
return matchIssuesEvent(commit, payload.(*api.IssuePayload), evt)
return matchIssuesEvent(payload.(*api.IssuePayload), evt)
case // issue_comment
webhook_module.HookEventIssueComment,
// `pull_request_comment` is same as `issue_comment`
// See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_comment-use-issue_comment
webhook_module.HookEventPullRequestComment:
return matchIssueCommentEvent(commit, payload.(*api.IssueCommentPayload), evt)
return matchIssueCommentEvent(payload.(*api.IssueCommentPayload), evt)
case // pull_request
webhook_module.HookEventPullRequest,
@ -229,19 +229,19 @@ func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent web
case // pull_request_review
webhook_module.HookEventPullRequestReviewApproved,
webhook_module.HookEventPullRequestReviewRejected:
return matchPullRequestReviewEvent(commit, payload.(*api.PullRequestPayload), evt)
return matchPullRequestReviewEvent(payload.(*api.PullRequestPayload), evt)
case // pull_request_review_comment
webhook_module.HookEventPullRequestReviewComment:
return matchPullRequestReviewCommentEvent(commit, payload.(*api.PullRequestPayload), evt)
return matchPullRequestReviewCommentEvent(payload.(*api.PullRequestPayload), evt)
case // release
webhook_module.HookEventRelease:
return matchReleaseEvent(commit, payload.(*api.ReleasePayload), evt)
return matchReleaseEvent(payload.(*api.ReleasePayload), evt)
case // registry_package
webhook_module.HookEventPackage:
return matchPackageEvent(commit, payload.(*api.PackagePayload), evt)
return matchPackageEvent(payload.(*api.PackagePayload), evt)
default:
log.Warn("unsupported event %q", triggedEvent)
@ -347,7 +347,7 @@ func matchPushEvent(commit *git.Commit, pushPayload *api.PushPayload, evt *jobpa
return matchTimes == len(evt.Acts())
}
func matchIssuesEvent(commit *git.Commit, issuePayload *api.IssuePayload, evt *jobparser.Event) bool {
func matchIssuesEvent(issuePayload *api.IssuePayload, evt *jobparser.Event) bool {
// with no special filter parameters
if len(evt.Acts()) == 0 {
return true
@ -495,7 +495,7 @@ func matchPullRequestEvent(gitRepo *git.Repository, commit *git.Commit, prPayloa
return activityTypeMatched && matchTimes == len(evt.Acts())
}
func matchIssueCommentEvent(commit *git.Commit, issueCommentPayload *api.IssueCommentPayload, evt *jobparser.Event) bool {
func matchIssueCommentEvent(issueCommentPayload *api.IssueCommentPayload, evt *jobparser.Event) bool {
// with no special filter parameters
if len(evt.Acts()) == 0 {
return true
@ -527,7 +527,7 @@ func matchIssueCommentEvent(commit *git.Commit, issueCommentPayload *api.IssueCo
return matchTimes == len(evt.Acts())
}
func matchPullRequestReviewEvent(commit *git.Commit, prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
func matchPullRequestReviewEvent(prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
// with no special filter parameters
if len(evt.Acts()) == 0 {
return true
@ -576,7 +576,7 @@ func matchPullRequestReviewEvent(commit *git.Commit, prPayload *api.PullRequestP
return matchTimes == len(evt.Acts())
}
func matchPullRequestReviewCommentEvent(commit *git.Commit, prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
func matchPullRequestReviewCommentEvent(prPayload *api.PullRequestPayload, evt *jobparser.Event) bool {
// with no special filter parameters
if len(evt.Acts()) == 0 {
return true
@ -625,7 +625,7 @@ func matchPullRequestReviewCommentEvent(commit *git.Commit, prPayload *api.PullR
return matchTimes == len(evt.Acts())
}
func matchReleaseEvent(commit *git.Commit, payload *api.ReleasePayload, evt *jobparser.Event) bool {
func matchReleaseEvent(payload *api.ReleasePayload, evt *jobparser.Event) bool {
// with no special filter parameters
if len(evt.Acts()) == 0 {
return true
@ -662,7 +662,7 @@ func matchReleaseEvent(commit *git.Commit, payload *api.ReleasePayload, evt *job
return matchTimes == len(evt.Acts())
}
func matchPackageEvent(commit *git.Commit, payload *api.PackagePayload, evt *jobparser.Event) bool {
func matchPackageEvent(payload *api.PackagePayload, evt *jobparser.Event) bool {
// with no special filter parameters
if len(evt.Acts()) == 0 {
return true

View File

@ -4,9 +4,8 @@
package pwn
import (
"math/rand"
"math/rand/v2"
"net/http"
"os"
"strings"
"testing"
"time"
@ -18,11 +17,6 @@ var client = New(WithHTTP(&http.Client{
Timeout: time.Second * 2,
}))
func TestMain(m *testing.M) {
rand.Seed(time.Now().Unix())
os.Exit(m.Run())
}
func TestPassword(t *testing.T) {
// Check input error
_, err := client.CheckPassword("", false)
@ -81,24 +75,24 @@ func testPassword() string {
// Set special character
for i := 0; i < 5; i++ {
random := rand.Intn(len(specialCharSet))
random := rand.IntN(len(specialCharSet))
password.WriteString(string(specialCharSet[random]))
}
// Set numeric
for i := 0; i < 5; i++ {
random := rand.Intn(len(numberSet))
random := rand.IntN(len(numberSet))
password.WriteString(string(numberSet[random]))
}
// Set uppercase
for i := 0; i < 5; i++ {
random := rand.Intn(len(upperCharSet))
random := rand.IntN(len(upperCharSet))
password.WriteString(string(upperCharSet[random]))
}
for i := 0; i < 5; i++ {
random := rand.Intn(len(allCharSet))
random := rand.IntN(len(allCharSet))
password.WriteString(string(allCharSet[random]))
}
inRune := []rune(password.String())
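
This hunk switches the password helper from `math/rand` to `math/rand/v2`: `Intn` becomes `IntN`, and the `rand.Seed` call in `TestMain` can be deleted because the v2 generator seeds itself. A minimal standalone sketch of the same pick-a-random-character pattern (the charset literal is illustrative):

```
package main

import (
	"fmt"
	"math/rand/v2" // v2: no Seed call needed, Intn is renamed IntN
	"strings"
)

func main() {
	const specialCharSet = "!@#$%&*"
	var password strings.Builder
	for i := 0; i < 5; i++ {
		// IntN returns a uniform int in [0, n).
		password.WriteByte(specialCharSet[rand.IntN(len(specialCharSet))])
	}
	fmt.Println(password.String())
}
```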

View File

@ -29,7 +29,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath
var revs map[string]*Commit
if commit.repo.LastCommitCache != nil {
var unHitPaths []string
revs, unHitPaths, err = getLastCommitForPathsByCache(ctx, commit.ID.String(), treePath, entryPaths, commit.repo.LastCommitCache)
revs, unHitPaths, err = getLastCommitForPathsByCache(commit.ID.String(), treePath, entryPaths, commit.repo.LastCommitCache)
if err != nil {
return nil, nil, err
}
@ -97,7 +97,7 @@ func (tes Entries) GetCommitsInfo(ctx context.Context, commit *Commit, treePath
return commitsInfo, treeCommit, nil
}
func getLastCommitForPathsByCache(ctx context.Context, commitID, treePath string, paths []string, cache *LastCommitCache) (map[string]*Commit, []string, error) {
func getLastCommitForPathsByCache(commitID, treePath string, paths []string, cache *LastCommitCache) (map[string]*Commit, []string, error) {
var unHitEntryPaths []string
results := make(map[string]*Commit)
for _, p := range paths {

View File

@ -18,7 +18,7 @@ import (
)
// ParseTreeEntries parses the output of a `git ls-tree -l` command.
func ParseTreeEntries(h ObjectFormat, data []byte) ([]*TreeEntry, error) {
func ParseTreeEntries(data []byte) ([]*TreeEntry, error) {
return parseTreeEntries(data, nil)
}

View File

@ -67,7 +67,7 @@ func TestParseTreeEntries(t *testing.T) {
}
for _, testCase := range testCases {
entries, err := ParseTreeEntries(Sha1ObjectFormat, []byte(testCase.Input))
entries, err := ParseTreeEntries([]byte(testCase.Input))
assert.NoError(t, err)
if len(entries) > 1 {
fmt.Println(testCase.Expected[0].ID)

View File

@ -17,13 +17,13 @@ import (
)
// ParseTreeEntries parses the output of a `git ls-tree -l` command.
func ParseTreeEntries(objectFormat ObjectFormat, data []byte) ([]*TreeEntry, error) {
return parseTreeEntries(objectFormat, data, nil)
func ParseTreeEntries(data []byte) ([]*TreeEntry, error) {
return parseTreeEntries(data, nil)
}
var sepSpace = []byte{' '}
func parseTreeEntries(objectFormat ObjectFormat, data []byte, ptree *Tree) ([]*TreeEntry, error) {
func parseTreeEntries(data []byte, ptree *Tree) ([]*TreeEntry, error) {
var err error
entries := make([]*TreeEntry, 0, bytes.Count(data, []byte{'\n'})+1)
for pos := 0; pos < len(data); {

View File

@ -12,8 +12,6 @@ import (
)
func TestParseTreeEntriesLong(t *testing.T) {
objectFormat := Sha1ObjectFormat
testCases := []struct {
Input string
Expected []*TreeEntry
@ -56,7 +54,7 @@ func TestParseTreeEntriesLong(t *testing.T) {
},
}
for _, testCase := range testCases {
entries, err := ParseTreeEntries(objectFormat, []byte(testCase.Input))
entries, err := ParseTreeEntries([]byte(testCase.Input))
assert.NoError(t, err)
assert.Len(t, entries, len(testCase.Expected))
for i, entry := range entries {
@ -66,8 +64,6 @@ func TestParseTreeEntriesLong(t *testing.T) {
}
func TestParseTreeEntriesShort(t *testing.T) {
objectFormat := Sha1ObjectFormat
testCases := []struct {
Input string
Expected []*TreeEntry
@ -91,7 +87,7 @@ func TestParseTreeEntriesShort(t *testing.T) {
},
}
for _, testCase := range testCases {
entries, err := ParseTreeEntries(objectFormat, []byte(testCase.Input))
entries, err := ParseTreeEntries([]byte(testCase.Input))
assert.NoError(t, err)
assert.Len(t, entries, len(testCase.Expected))
for i, entry := range entries {
@ -102,7 +98,7 @@ func TestParseTreeEntriesShort(t *testing.T) {
func TestParseTreeEntriesInvalid(t *testing.T) {
// there was a panic: "runtime error: slice bounds out of range" when the input was invalid: #20315
entries, err := ParseTreeEntries(Sha1ObjectFormat, []byte("100644 blob ea0d83c9081af9500ac9f804101b3fd0a5c293af"))
entries, err := ParseTreeEntries([]byte("100644 blob ea0d83c9081af9500ac9f804101b3fd0a5c293af"))
assert.Error(t, err)
assert.Len(t, entries, 0)
}

View File

@ -77,11 +77,8 @@ func (t *Tree) ListEntries() (Entries, error) {
return nil, runErr
}
objectFormat, err := t.repo.GetObjectFormat()
if err != nil {
return nil, err
}
t.entries, err = parseTreeEntries(objectFormat, stdout, t)
var err error
t.entries, err = parseTreeEntries(stdout, t)
if err == nil {
t.entriesParsed = true
}
@ -104,11 +101,8 @@ func (t *Tree) listEntriesRecursive(extraArgs TrustedCmdArgs) (Entries, error) {
return nil, runErr
}
objectFormat, err := t.repo.GetObjectFormat()
if err != nil {
return nil, err
}
t.entriesRecursive, err = parseTreeEntries(objectFormat, stdout, t)
var err error
t.entriesRecursive, err = parseTreeEntries(stdout, t)
if err == nil {
t.entriesRecursiveParsed = true
}

View File

@ -62,8 +62,8 @@ func isIndexable(entry *git.TreeEntry) bool {
}
// parseGitLsTreeOutput parses the output of a `git ls-tree -r --full-name` command
func parseGitLsTreeOutput(objectFormat git.ObjectFormat, stdout []byte) ([]internal.FileUpdate, error) {
entries, err := git.ParseTreeEntries(objectFormat, stdout)
func parseGitLsTreeOutput(stdout []byte) ([]internal.FileUpdate, error) {
entries, err := git.ParseTreeEntries(stdout)
if err != nil {
return nil, err
}
@ -91,10 +91,8 @@ func genesisChanges(ctx context.Context, repo *repo_model.Repository, revision s
return nil, runErr
}
objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
var err error
changes.Updates, err = parseGitLsTreeOutput(objectFormat, stdout)
changes.Updates, err = parseGitLsTreeOutput(stdout)
return &changes, err
}
@ -172,8 +170,6 @@ func nonGenesisChanges(ctx context.Context, repo *repo_model.Repository, revisio
return nil, err
}
objectFormat := git.ObjectFormatFromName(repo.ObjectFormatName)
changes.Updates, err = parseGitLsTreeOutput(objectFormat, lsTreeStdout)
changes.Updates, err = parseGitLsTreeOutput(lsTreeStdout)
return &changes, err
}

View File

@ -65,11 +65,11 @@ func (g *ASTTransformer) Transform(node *ast.Document, reader text.Reader, pc pa
case *ast.Paragraph:
g.applyElementDir(v)
case *ast.Image:
g.transformImage(ctx, v, reader)
g.transformImage(ctx, v)
case *ast.Link:
g.transformLink(ctx, v, reader)
g.transformLink(ctx, v)
case *ast.List:
g.transformList(ctx, v, reader, rc)
g.transformList(ctx, v, rc)
case *ast.Text:
if v.SoftLineBreak() && !v.HardLineBreak() {
if ctx.Metas["mode"] != "document" {

View File

@ -68,7 +68,7 @@ func cssColorHandler(value string) bool {
return css.HSLA.MatchString(value)
}
func (g *ASTTransformer) transformCodeSpan(ctx *markup.RenderContext, v *ast.CodeSpan, reader text.Reader) {
func (g *ASTTransformer) transformCodeSpan(_ *markup.RenderContext, v *ast.CodeSpan, reader text.Reader) {
colorContent := v.Text(reader.Source())
if cssColorHandler(string(colorContent)) {
v.AppendChild(v, NewColorPreview(colorContent))

View File

@ -13,7 +13,7 @@ import (
"github.com/yuin/goldmark/util"
)
func (g *ASTTransformer) transformHeading(ctx *markup.RenderContext, v *ast.Heading, reader text.Reader, tocList *[]markup.Header) {
func (g *ASTTransformer) transformHeading(_ *markup.RenderContext, v *ast.Heading, reader text.Reader, tocList *[]markup.Header) {
for _, attr := range v.Attributes() {
if _, ok := attr.Value.([]byte); !ok {
v.SetAttribute(attr.Name, []byte(fmt.Sprintf("%v", attr.Value)))

View File

@ -10,10 +10,9 @@ import (
giteautil "code.gitea.io/gitea/modules/util"
"github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/text"
)
func (g *ASTTransformer) transformImage(ctx *markup.RenderContext, v *ast.Image, reader text.Reader) {
func (g *ASTTransformer) transformImage(ctx *markup.RenderContext, v *ast.Image) {
// Images need two things:
//
// 1. Their src needs to munged to be a real value

View File

@ -10,10 +10,9 @@ import (
giteautil "code.gitea.io/gitea/modules/util"
"github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/text"
)
func (g *ASTTransformer) transformLink(ctx *markup.RenderContext, v *ast.Link, reader text.Reader) {
func (g *ASTTransformer) transformLink(ctx *markup.RenderContext, v *ast.Link) {
// Links need their href to munged to be a real value
link := v.Destination
isAnchorFragment := len(link) > 0 && link[0] == '#'

View File

@ -11,7 +11,6 @@ import (
"github.com/yuin/goldmark/ast"
east "github.com/yuin/goldmark/extension/ast"
"github.com/yuin/goldmark/renderer/html"
"github.com/yuin/goldmark/text"
"github.com/yuin/goldmark/util"
)
@ -50,7 +49,7 @@ func (r *HTMLRenderer) renderTaskCheckBox(w util.BufWriter, source []byte, node
return ast.WalkContinue, nil
}
func (g *ASTTransformer) transformList(ctx *markup.RenderContext, v *ast.List, reader text.Reader, rc *RenderConfig) {
func (g *ASTTransformer) transformList(_ *markup.RenderContext, v *ast.List, rc *RenderConfig) {
if v.HasChildren() {
children := make([]ast.Node, 0, v.ChildCount())
child := v.FirstChild()

View File

@ -54,7 +54,7 @@ func (r *stripRenderer) Render(w io.Writer, source []byte, doc ast.Node) error {
}
return ast.WalkContinue, nil
case *ast.Link:
r.processLink(w, v.Destination)
r.processLink(v.Destination)
return ast.WalkSkipChildren, nil
case *ast.AutoLink:
// This could be a reference to an issue or pull - if so convert it
@ -124,7 +124,7 @@ func (r *stripRenderer) processAutoLink(w io.Writer, link []byte) {
_, _ = w.Write([]byte(parts[4]))
}
func (r *stripRenderer) processLink(w io.Writer, link []byte) {
func (r *stripRenderer) processLink(link []byte) {
// Links are processed out of band
r.links = append(r.links, string(link))
}

View File

@ -22,7 +22,7 @@ func TestOption(t *testing.T) {
assert.Equal(t, int(0), none.Value())
assert.Equal(t, int(1), none.ValueOrDefault(1))
some := optional.Some[int](1)
some := optional.Some(1)
assert.True(t, some.Has())
assert.Equal(t, int(1), some.Value())
assert.Equal(t, int(1), some.ValueOrDefault(2))
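
The test change relies on Go's type-parameter inference: `optional.Some(1)` infers `int` from the argument, so the explicit `optional.Some[int](1)` is redundant. A minimal sketch with a hypothetical stand-in `Option`/`Some` (not the module's actual implementation) showing that both forms produce the same value:

```
package main

import "fmt"

// Option is a hypothetical stand-in for the module's optional.Option type,
// used only to illustrate type-parameter inference.
type Option[T any] struct {
	value T
	has   bool
}

func Some[T any](v T) Option[T] { return Option[T]{value: v, has: true} }

func main() {
	a := Some[int](1) // explicit instantiation
	b := Some(1)      // T inferred as int from the argument
	fmt.Println(a == b, b.has, b.value) // true true 1
}
```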

View File

@ -78,6 +78,7 @@ type PackageMetadataVersion struct {
Repository Repository `json:"repository,omitempty"`
Keywords []string `json:"keywords,omitempty"`
Dependencies map[string]string `json:"dependencies,omitempty"`
BundleDependencies []string `json:"bundleDependencies,omitempty"`
DevDependencies map[string]string `json:"devDependencies,omitempty"`
PeerDependencies map[string]string `json:"peerDependencies,omitempty"`
Bin map[string]string `json:"bin,omitempty"`
@ -218,6 +219,7 @@ func ParsePackage(r io.Reader) (*Package, error) {
ProjectURL: meta.Homepage,
Keywords: meta.Keywords,
Dependencies: meta.Dependencies,
BundleDependencies: meta.BundleDependencies,
DevelopmentDependencies: meta.DevDependencies,
PeerDependencies: meta.PeerDependencies,
OptionalDependencies: meta.OptionalDependencies,

View File

@ -16,6 +16,7 @@ type Metadata struct {
ProjectURL string `json:"project_url,omitempty"`
Keywords []string `json:"keywords,omitempty"`
Dependencies map[string]string `json:"dependencies,omitempty"`
BundleDependencies []string `json:"bundleDependencies,omitempty"`
DevelopmentDependencies map[string]string `json:"development_dependencies,omitempty"`
PeerDependencies map[string]string `json:"peer_dependencies,omitempty"`
OptionalDependencies map[string]string `json:"optional_dependencies,omitempty"`
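
These hunks thread `bundleDependencies` through the npm package metadata. Unlike the map-typed dependency fields, npm serializes bundled dependencies as a plain array of package names, which is why the new field is `[]string`. A minimal decoding sketch with a struct trimmed to the two contrasting fields (the JSON document is illustrative):

```
package main

import (
	"encoding/json"
	"fmt"
)

// Field tags mirror the npm JSON keys shown in the diff above.
type versionMetadata struct {
	Dependencies       map[string]string `json:"dependencies,omitempty"`
	BundleDependencies []string          `json:"bundleDependencies,omitempty"`
}

func main() {
	raw := `{"dependencies":{"lodash":"^4.17.21"},"bundleDependencies":["lodash"]}`
	var v versionMetadata
	if err := json.Unmarshal([]byte(raw), &v); err != nil {
		panic(err)
	}
	fmt.Println(v.Dependencies, v.BundleDependencies)
}
```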

View File

@ -38,12 +38,12 @@ func loadIncomingEmailFrom(rootCfg ConfigProvider) {
return
}
if err := checkReplyToAddress(IncomingEmail.ReplyToAddress); err != nil {
if err := checkReplyToAddress(); err != nil {
log.Fatal("Invalid incoming_mail.REPLY_TO_ADDRESS (%s): %v", IncomingEmail.ReplyToAddress, err)
}
}
func checkReplyToAddress(address string) error {
func checkReplyToAddress() error {
parsed, err := mail.ParseAddress(IncomingEmail.ReplyToAddress)
if err != nil {
return err

View File

@ -97,7 +97,7 @@ func getStorage(rootCfg ConfigProvider, name, typ string, sec ConfigSection) (*S
return nil, err
}
overrideSec := getStorageOverrideSection(rootCfg, targetSec, sec, tp, name)
overrideSec := getStorageOverrideSection(rootCfg, sec, tp, name)
targetType := targetSec.Key("STORAGE_TYPE").String()
switch targetType {
@ -189,7 +189,7 @@ func getStorageTargetSection(rootCfg ConfigProvider, name, typ string, sec Confi
}
// getStorageOverrideSection override section will be read SERVE_DIRECT, PATH, MINIO_BASE_PATH, MINIO_BUCKET to override the targetsec when possible
func getStorageOverrideSection(rootConfig ConfigProvider, targetSec, sec ConfigSection, targetSecType targetSecType, name string) ConfigSection {
func getStorageOverrideSection(rootConfig ConfigProvider, sec ConfigSection, targetSecType targetSecType, name string) ConfigSection {
if targetSecType == targetSecIsSec {
return nil
}

options/license/Catharon (new file, 121 lines)
View File

@ -0,0 +1,121 @@
The Catharon Open Source LICENSE
----------------------------
2000-Jul-04
Copyright (C) 2000 by Catharon Productions, Inc.
Introduction
============
This license applies to source files distributed by Catharon
Productions, Inc. in several archive packages. This license
applies to all files found in such packages which do not fall
under their own explicit license.
This license was inspired by the BSD, Artistic, and IJG
(Independent JPEG Group) licenses, which all encourage inclusion
and use of free software in commercial and freeware products
alike. As a consequence, its main points are that:
o We don't promise that this software works. However, we are
interested in any kind of bug reports. (`as is' distribution)
o You can use this software for whatever you want, in parts or
full form, without having to pay us. (`royalty-free' usage)
o You may not pretend that you wrote this software. If you use
it, or only parts of it, in a program, you must acknowledge
somewhere in your documentation that you have used the
Catharon Code. (`credits')
We specifically permit and encourage the inclusion of this
software, with or without modifications, in commercial products.
We disclaim all warranties covering the packages distributed by
Catharon Productions, Inc. and assume no liability related to
their use.
Legal Terms
===========
0. Definitions
--------------
Throughout this license, the terms `Catharon Package', `package',
and `Catharon Code' refer to the set of files originally
distributed by Catharon Productions, Inc.
`You' refers to the licensee, or person using the project, where
`using' is a generic term including compiling the project's source
code as well as linking it to form a `program' or `executable'.
This program is referred to as `a program using one of the
Catharon Packages'.
This license applies to all files distributed in the original
Catharon Package(s), including all source code, binaries and
documentation, unless otherwise stated in the file in its
original, unmodified form as distributed in the original archive.
If you are unsure whether or not a particular file is covered by
this license, you must contact us to verify this.
The Catharon Packages are copyright (C) 2000 by Catharon
Productions, Inc. All rights reserved except as specified below.
1. No Warranty
--------------
THE CATHARON PACKAGES ARE PROVIDED `AS IS' WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. IN NO EVENT WILL ANY OF THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY DAMAGES CAUSED BY THE USE OF OR THE INABILITY TO
USE THE CATHARON PACKAGE.
2. Redistribution
-----------------
This license grants a worldwide, royalty-free, perpetual and
irrevocable right and license to use, execute, perform, compile,
display, copy, create derivative works of, distribute and
sublicense the Catharon Packages (in both source and object code
forms) and derivative works thereof for any purpose; and to
authorize others to exercise some or all of the rights granted
herein, subject to the following conditions:
o Redistribution of source code must retain this license file
(`license.txt') unaltered; any additions, deletions or changes
to the original files must be clearly indicated in
accompanying documentation. The copyright notices of the
unaltered, original files must be preserved in all copies of
source files.
o Redistribution in binary form must provide a disclaimer that
states that the software is based in part on the work of
Catharon Productions, Inc. in the distribution documentation.
These conditions apply to any software derived from or based on
the Catharon Packages, not just the unmodified files. If you use
our work, you must acknowledge us. However, no fee need be paid
to us.
3. Advertising
--------------
Neither Catharon Productions, Inc. and contributors nor you shall
use the name of the other for commercial, advertising, or
promotional purposes without specific prior written permission.
We suggest, but do not require, that you use the following phrase
to refer to this software in your documentation: 'this software is
based in part on the Catharon Typography Project'.
As you have not signed this license, you are not required to
accept it. However, as the Catharon Packages are copyrighted
material, only this license, or another one contracted with the
authors, grants you the right to use, distribute, and modify it.
Therefore, by using, distributing, or modifying the Catharon
Packages, you indicate that you understand and accept all the
terms of this license.

View File

@ -3495,6 +3495,7 @@ npm.install = To install the package using npm, run the following command:
npm.install2 = or add it to the package.json file:
npm.dependencies = Dependencies
npm.dependencies.development = Development Dependencies
npm.dependencies.bundle = Bundled Dependencies
npm.dependencies.peer = Peer Dependencies
npm.dependencies.optional = Optional Dependencies
npm.details.tag = Tag

View File

@ -466,14 +466,15 @@ func (ar artifactRoutes) downloadArtifact(ctx *ArtifactContext) {
log.Error("Error getting artifact: %v", err)
ctx.Error(http.StatusInternalServerError, err.Error())
return
} else if !exist {
}
if !exist {
log.Error("artifact with ID %d does not exist", artifactID)
ctx.Error(http.StatusNotFound, fmt.Sprintf("artifact with ID %d does not exist", artifactID))
return
}
if artifact.RunID != runID {
log.Error("Error dismatch runID and artifactID, task: %v, artifact: %v", runID, artifactID)
ctx.Error(http.StatusBadRequest, err.Error())
log.Error("Error mismatch runID and artifactID, task: %v, artifact: %v", runID, artifactID)
ctx.Error(http.StatusBadRequest)
return
}

View File

@ -64,6 +64,7 @@ func createPackageMetadataVersion(registryURL string, pd *packages_model.Package
Homepage: metadata.ProjectURL,
License: metadata.License,
Dependencies: metadata.Dependencies,
BundleDependencies: metadata.BundleDependencies,
DevDependencies: metadata.DevelopmentDependencies,
PeerDependencies: metadata.PeerDependencies,
OptionalDependencies: metadata.OptionalDependencies,

View File

@ -67,7 +67,7 @@ func AddUserBadges(ctx *context.APIContext) {
// "$ref": "#/responses/forbidden"
form := web.GetForm(ctx).(*api.UserBadgeOption)
badges := prepareBadgesForReplaceOrAdd(ctx, *form)
badges := prepareBadgesForReplaceOrAdd(*form)
if err := user_model.AddUserBadges(ctx, ctx.ContextUser, badges); err != nil {
ctx.Error(http.StatusInternalServerError, "ReplaceUserBadges", err)
@ -103,7 +103,7 @@ func DeleteUserBadges(ctx *context.APIContext) {
// "$ref": "#/responses/validationError"
form := web.GetForm(ctx).(*api.UserBadgeOption)
badges := prepareBadgesForReplaceOrAdd(ctx, *form)
badges := prepareBadgesForReplaceOrAdd(*form)
if err := user_model.RemoveUserBadges(ctx, ctx.ContextUser, badges); err != nil {
ctx.Error(http.StatusInternalServerError, "ReplaceUserBadges", err)
@ -113,7 +113,7 @@ func DeleteUserBadges(ctx *context.APIContext) {
ctx.Status(http.StatusNoContent)
}
func prepareBadgesForReplaceOrAdd(ctx *context.APIContext, form api.UserBadgeOption) []*user_model.Badge {
func prepareBadgesForReplaceOrAdd(form api.UserBadgeOption) []*user_model.Badge {
badges := make([]*user_model.Badge, len(form.BadgeSlugs))
for i, badge := range form.BadgeSlugs {
badges[i] = &user_model.Badge{

View File

@ -180,7 +180,7 @@ func Migrate(ctx *context.APIContext) {
Status: repo_model.RepositoryBeingMigrated,
})
if err != nil {
handleMigrateError(ctx, repoOwner, remoteAddr, err)
handleMigrateError(ctx, repoOwner, err)
return
}
@ -207,7 +207,7 @@ func Migrate(ctx *context.APIContext) {
}()
if repo, err = migrations.MigrateRepository(graceful.GetManager().HammerContext(), ctx.Doer, repoOwner.Name, opts, nil); err != nil {
handleMigrateError(ctx, repoOwner, remoteAddr, err)
handleMigrateError(ctx, repoOwner, err)
return
}
@ -215,7 +215,7 @@ func Migrate(ctx *context.APIContext) {
ctx.JSON(http.StatusCreated, convert.ToRepo(ctx, repo, access_model.Permission{AccessMode: perm.AccessModeAdmin}))
}
func handleMigrateError(ctx *context.APIContext, repoOwner *user_model.User, remoteAddr string, err error) {
func handleMigrateError(ctx *context.APIContext, repoOwner *user_model.User, err error) {
switch {
case repo_model.IsErrRepoAlreadyExist(err):
ctx.Error(http.StatusConflict, "", "The repository with the same name already exists.")

View File

@ -121,9 +121,9 @@ func HookPreReceive(ctx *gitea_context.PrivateContext) {
case refFullName.IsBranch():
preReceiveBranch(ourCtx, oldCommitID, newCommitID, refFullName)
case refFullName.IsTag():
preReceiveTag(ourCtx, oldCommitID, newCommitID, refFullName)
preReceiveTag(ourCtx, refFullName)
case git.DefaultFeatures.SupportProcReceive && refFullName.IsFor():
preReceiveFor(ourCtx, oldCommitID, newCommitID, refFullName)
preReceiveFor(ourCtx, refFullName)
default:
ourCtx.AssertCanWriteCode()
}
@ -368,7 +368,7 @@ func preReceiveBranch(ctx *preReceiveContext, oldCommitID, newCommitID string, r
}
}
func preReceiveTag(ctx *preReceiveContext, oldCommitID, newCommitID string, refFullName git.RefName) {
func preReceiveTag(ctx *preReceiveContext, refFullName git.RefName) {
if !ctx.AssertCanWriteCode() {
return
}
@ -404,7 +404,7 @@ func preReceiveTag(ctx *preReceiveContext, oldCommitID, newCommitID string, refF
}
}
func preReceiveFor(ctx *preReceiveContext, oldCommitID, newCommitID string, refFullName git.RefName) {
func preReceiveFor(ctx *preReceiveContext, refFullName git.RefName) {
if !ctx.AssertCreatePullRequest() {
return
}

View File

@ -159,7 +159,7 @@ func DashboardPost(ctx *context.Context) {
switch form.Op {
case "sync_repo_branches":
go func() {
if err := repo_service.AddAllRepoBranchesToSyncQueue(graceful.GetManager().ShutdownContext(), ctx.Doer.ID); err != nil {
if err := repo_service.AddAllRepoBranchesToSyncQueue(graceful.GetManager().ShutdownContext()); err != nil {
log.Error("AddAllRepoBranchesToSyncQueue: %v: %v", ctx.Doer.ID, err)
}
}()

View File

@ -279,7 +279,7 @@ func GetFeedType(name string, req *http.Request) (bool, string, string) {
}
// feedActionsToFeedItems convert gitea's Repo's Releases to feeds Item
func releasesToFeedItems(ctx *context.Context, releases []*repo_model.Release, isReleasesOnly bool) (items []*feeds.Item, err error) {
func releasesToFeedItems(ctx *context.Context, releases []*repo_model.Release) (items []*feeds.Item, err error) {
for _, rel := range releases {
err := rel.LoadAttributes(ctx)
if err != nil {

View File

@ -42,7 +42,7 @@ func ShowReleaseFeed(ctx *context.Context, repo *repo_model.Repository, isReleas
Created: time.Now(),
}
feed.Items, err = releasesToFeedItems(ctx, releases, isReleasesOnly)
feed.Items, err = releasesToFeedItems(ctx, releases)
if err != nil {
ctx.ServerError("releasesToFeedItems", err)
return

View File

@ -504,7 +504,7 @@ func getRunJobs(ctx *context_module.Context, runIndex, jobIndex int64) (*actions
return nil, nil
}
if len(jobs) == 0 {
ctx.Error(http.StatusNotFound, err.Error())
ctx.Error(http.StatusNotFound)
return nil, nil
}

View File

@ -787,7 +787,7 @@ func (rt RepoRefType) RefTypeIncludesTags() bool {
return false
}
func getRefNameFromPath(ctx *Base, repo *Repository, path string, isExist func(string) bool) string {
func getRefNameFromPath(repo *Repository, path string, isExist func(string) bool) string {
refName := ""
parts := strings.Split(path, "/")
for i, part := range parts {
@ -823,7 +823,7 @@ func getRefName(ctx *Base, repo *Repository, pathType RepoRefType) string {
repo.TreePath = path
return repo.Repository.DefaultBranch
case RepoRefBranch:
ref := getRefNameFromPath(ctx, repo, path, repo.GitRepo.IsBranchExist)
ref := getRefNameFromPath(repo, path, repo.GitRepo.IsBranchExist)
if len(ref) == 0 {
// check if ref is HEAD
parts := strings.Split(path, "/")
@ -833,7 +833,7 @@ func getRefName(ctx *Base, repo *Repository, pathType RepoRefType) string {
}
// maybe it's a renamed branch
return getRefNameFromPath(ctx, repo, path, func(s string) bool {
return getRefNameFromPath(repo, path, func(s string) bool {
b, exist, err := git_model.FindRenamedBranch(ctx, repo.Repository.ID, s)
if err != nil {
log.Error("FindRenamedBranch: %v", err)
@ -853,7 +853,7 @@ func getRefName(ctx *Base, repo *Repository, pathType RepoRefType) string {
return ref
case RepoRefTag:
return getRefNameFromPath(ctx, repo, path, repo.GitRepo.IsTagExist)
return getRefNameFromPath(repo, path, repo.GitRepo.IsTagExist)
case RepoRefCommit:
parts := strings.Split(path, "/")

View File

@ -27,7 +27,7 @@ type commonStorageCheckOptions struct {
name string
}
func commonCheckStorage(ctx context.Context, logger log.Logger, autofix bool, opts *commonStorageCheckOptions) error {
func commonCheckStorage(logger log.Logger, autofix bool, opts *commonStorageCheckOptions) error {
totalCount, orphanedCount := 0, 0
totalSize, orphanedSize := int64(0), int64(0)
@ -98,7 +98,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
}
if opts.Attachments || opts.All {
if err := commonCheckStorage(ctx, logger, autofix,
if err := commonCheckStorage(logger, autofix,
&commonStorageCheckOptions{
storer: storage.Attachments,
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
@ -116,7 +116,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
logger.Info("LFS isn't enabled (skipped)")
return nil
}
if err := commonCheckStorage(ctx, logger, autofix,
if err := commonCheckStorage(logger, autofix,
&commonStorageCheckOptions{
storer: storage.LFS,
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
@ -132,7 +132,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
}
if opts.Avatars || opts.All {
if err := commonCheckStorage(ctx, logger, autofix,
if err := commonCheckStorage(logger, autofix,
&commonStorageCheckOptions{
storer: storage.Avatars,
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
@ -146,7 +146,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
}
if opts.RepoAvatars || opts.All {
if err := commonCheckStorage(ctx, logger, autofix,
if err := commonCheckStorage(logger, autofix,
&commonStorageCheckOptions{
storer: storage.RepoAvatars,
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
@ -160,7 +160,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
}
if opts.RepoArchives || opts.All {
if err := commonCheckStorage(ctx, logger, autofix,
if err := commonCheckStorage(logger, autofix,
&commonStorageCheckOptions{
storer: storage.RepoArchives,
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {
@ -182,7 +182,7 @@ func checkStorage(opts *checkStorageOptions) func(ctx context.Context, logger lo
logger.Info("Packages isn't enabled (skipped)")
return nil
}
if err := commonCheckStorage(ctx, logger, autofix,
if err := commonCheckStorage(logger, autofix,
&commonStorageCheckOptions{
storer: storage.Packages,
isOrphaned: func(path string, obj storage.Object, stat fs.FileInfo) (bool, error) {

View File

@ -7,7 +7,7 @@ package migrations
import (
"errors"
"github.com/google/go-github/v57/github"
"github.com/google/go-github/v61/github"
)
// ErrRepoNotCreated returns the error that repository not created

View File

@ -977,25 +977,24 @@ func (g *GiteaLocalUploader) Finish() error {
}
func (g *GiteaLocalUploader) remapUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) error {
var userid int64
var userID int64
var err error
if g.sameApp {
userid, err = g.remapLocalUser(source, target)
userID, err = g.remapLocalUser(source)
} else {
userid, err = g.remapExternalUser(source, target)
userID, err = g.remapExternalUser(source)
}
if err != nil {
return err
}
if userid > 0 {
return target.RemapExternalUser("", 0, userid)
if userID > 0 {
return target.RemapExternalUser("", 0, userID)
}
return target.RemapExternalUser(source.GetExternalName(), source.GetExternalID(), g.doer.ID)
}
func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) (int64, error) {
func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrated) (int64, error) {
userid, ok := g.userMap[source.GetExternalID()]
if !ok {
name, err := user_model.GetUserNameByID(g.ctx, source.GetExternalID())
@ -1013,7 +1012,7 @@ func (g *GiteaLocalUploader) remapLocalUser(source user_model.ExternalUserMigrat
return userid, nil
}
func (g *GiteaLocalUploader) remapExternalUser(source user_model.ExternalUserMigrated, target user_model.ExternalUserRemappable) (userid int64, err error) {
func (g *GiteaLocalUploader) remapExternalUser(source user_model.ExternalUserMigrated) (userid int64, err error) {
userid, ok := g.userMap[source.GetExternalID()]
if !ok {
userid, err = user_model.GetUserIDByExternalUserID(g.ctx, g.gitServiceType.Name(), fmt.Sprintf("%d", source.GetExternalID()))

View File

@ -20,7 +20,7 @@ import (
"code.gitea.io/gitea/modules/proxy"
"code.gitea.io/gitea/modules/structs"
"github.com/google/go-github/v57/github"
"github.com/google/go-github/v61/github"
"golang.org/x/oauth2"
)

View File

@ -90,7 +90,7 @@ func Update(ctx context.Context, pullLimit, pushLimit int) error {
pullMirrorsRequested := 0
if pullLimit != 0 {
if err := repo_model.MirrorsIterate(ctx, pullLimit, func(idx int, bean any) error {
if err := repo_model.MirrorsIterate(ctx, pullLimit, func(_ int, bean any) error {
if err := handler(bean); err != nil {
return err
}

View File

@ -49,7 +49,7 @@ var ErrSubmitReviewOnClosedPR = errors.New("can't submit review for a closed or
// checkInvalidation checks if the line of code comment got changed by another commit.
// If the line got changed the comment is going to be invalidated.
func checkInvalidation(ctx context.Context, c *issues_model.Comment, doer *user_model.User, repo *git.Repository, branch string) error {
func checkInvalidation(ctx context.Context, c *issues_model.Comment, repo *git.Repository, branch string) error {
// FIXME differentiate between previous and proposed line
commit, err := repo.LineBlame(branch, repo.Path, c.TreePath, uint(c.UnsignedLine()))
if err != nil && (strings.Contains(err.Error(), "fatal: no such path") || notEnoughLines.MatchString(err.Error())) {
@ -83,7 +83,7 @@ func InvalidateCodeComments(ctx context.Context, prs issues_model.PullRequestLis
return fmt.Errorf("find code comments: %v", err)
}
for _, comment := range codeComments {
if err := checkInvalidation(ctx, comment, doer, repo, branch); err != nil {
if err := checkInvalidation(ctx, comment, repo, branch); err != nil {
return err
}
}

View File

@ -39,7 +39,7 @@ func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.
go AddTestPullRequestTask(doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "")
}()
return updateHeadByRebaseOnToBase(ctx, pr, doer, message)
return updateHeadByRebaseOnToBase(ctx, pr, doer)
}
if err := pr.LoadBaseRepo(ctx); err != nil {

View File

@ -18,7 +18,7 @@ import (
)
// updateHeadByRebaseOnToBase handles updating a PR's head branch by rebasing it on the PR current base branch
func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string) error {
func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) error {
// "Clone" base repo and add the cache headers for the head repo and branch
mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, "")
if err != nil {

View File

@ -80,7 +80,7 @@ func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateR
return fmt.Errorf("getRepositoryByID: %w", err)
}
if err := adoptRepository(ctx, repoPath, doer, repo, opts.DefaultBranch); err != nil {
if err := adoptRepository(ctx, repoPath, repo, opts.DefaultBranch); err != nil {
return fmt.Errorf("createDelegateHooks: %w", err)
}
@ -111,7 +111,7 @@ func AdoptRepository(ctx context.Context, doer, u *user_model.User, opts CreateR
return repo, nil
}
func adoptRepository(ctx context.Context, repoPath string, u *user_model.User, repo *repo_model.Repository, defaultBranch string) (err error) {
func adoptRepository(ctx context.Context, repoPath string, repo *repo_model.Repository, defaultBranch string) (err error) {
isExist, err := util.IsExist(repoPath)
if err != nil {
log.Error("Unable to check if %s exists. Error: %v", repoPath, err)

View File

@ -527,7 +527,7 @@ func handlerBranchSync(items ...*BranchSyncOptions) []*BranchSyncOptions {
return nil
}
func addRepoToBranchSyncQueue(repoID, doerID int64) error {
func addRepoToBranchSyncQueue(repoID int64) error {
return branchSyncQueue.Push(&BranchSyncOptions{
RepoID: repoID,
})
@ -543,9 +543,9 @@ func initBranchSyncQueue(ctx context.Context) error {
return nil
}
func AddAllRepoBranchesToSyncQueue(ctx context.Context, doerID int64) error {
func AddAllRepoBranchesToSyncQueue(ctx context.Context) error {
if err := db.Iterate(ctx, builder.Eq{"is_empty": false}, func(ctx context.Context, repo *repo_model.Repository) error {
return addRepoToBranchSyncQueue(repo.ID, doerID)
return addRepoToBranchSyncQueue(repo.ID)
}); err != nil {
return fmt.Errorf("run sync all branches failed: %v", err)
}

View File

@ -211,7 +211,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
}
for _, file := range opts.Files {
if err := handleCheckErrors(file, commit, opts, repo); err != nil {
if err := handleCheckErrors(file, commit, opts); err != nil {
return nil, err
}
}
@ -277,7 +277,7 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
}
// handles the check for various issues for ChangeRepoFiles
func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRepoFilesOptions, repo *repo_model.Repository) error {
func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRepoFilesOptions) error {
if file.Operation == "update" || file.Operation == "delete" {
fromEntry, err := commit.GetTreeEntryByPath(file.Options.fromTreePath)
if err != nil {

View File

@ -35,7 +35,7 @@ func TestUpdateUser(t *testing.T) {
Description: optional.Some("description"),
AllowGitHook: optional.Some(true),
AllowImportLocal: optional.Some(true),
MaxRepoCreation: optional.Some[int](10),
MaxRepoCreation: optional.Some(10),
IsRestricted: optional.Some(true),
IsActive: optional.Some(false),
IsAdmin: optional.Some(true),

View File

@ -45,6 +45,15 @@
</div>
{{end}}
{{if .PackageDescriptor.Metadata.BundleDependencies}}
<h4 class="ui top attached header">{{ctx.Locale.Tr "packages.npm.dependencies.bundle"}}</h4>
<div class="ui attached segment">
{{range .PackageDescriptor.Metadata.BundleDependencies}}
{{.}}
{{end}}
</div>
{{end}}
{{if .PackageDescriptor.Metadata.Keywords}}
<h4 class="ui top attached header">{{ctx.Locale.Tr "packages.keywords"}}</h4>
<div class="ui attached segment">

View File

@ -235,7 +235,7 @@
{{if and (not $.Repository.IsArchived) (not .DiffNotAvailable)}}
<template id="issue-comment-editor-template">
<div class="ui comment form">
<div class="ui form comment">
{{template "shared/combomarkdowneditor" (dict
"MarkdownPreviewUrl" (print $.Repository.Link "/markup")
"MarkdownPreviewContext" $.RepoLink
@ -249,7 +249,7 @@
{{end}}
<div class="text right edit buttons">
<button class="ui cancel button">{{ctx.Locale.Tr "repo.issues.cancel"}}</button>
<button class="ui primary save button">{{ctx.Locale.Tr "repo.issues.save"}}</button>
<button class="ui primary button">{{ctx.Locale.Tr "repo.issues.save"}}</button>
</div>
</div>
</template>

View File

@ -146,7 +146,7 @@
</div>
<template id="issue-comment-editor-template">
<div class="ui comment form">
<div class="ui form comment">
<div class="field">
{{template "shared/combomarkdowneditor" (dict
"MarkdownPreviewUrl" (print .Repository.Link "/markup")
@ -164,8 +164,8 @@
<div class="field">
<div class="text right edit">
<button class="ui basic cancel button">{{ctx.Locale.Tr "repo.issues.cancel"}}</button>
<button class="ui primary save button">{{ctx.Locale.Tr "repo.issues.save"}}</button>
<button class="ui cancel button">{{ctx.Locale.Tr "repo.issues.cancel"}}</button>
<button class="ui primary button">{{ctx.Locale.Tr "repo.issues.save"}}</button>
</div>
</div>
</div>

View File

@ -169,7 +169,7 @@ nwIDAQAB
assert.Nil(t, u)
assert.Error(t, err)
signRequest := func(t *testing.T, rw *RequestWrapper, version string) {
signRequest := func(rw *RequestWrapper, version string) {
req := rw.Request
username := req.Header.Get("X-Ops-Userid")
if version != "1.0" && version != "1.3" {
@ -255,7 +255,7 @@ nwIDAQAB
t.Run(v, func(t *testing.T) {
defer tests.PrintCurrentTest(t)()
signRequest(t, req, v)
signRequest(req, v)
u, err = auth.Verify(req.Request, nil, nil, nil)
assert.NotNil(t, u)
assert.NoError(t, err)

View File

@ -77,7 +77,7 @@ func TestAPIListReleases(t *testing.T) {
testFilterByLen(true, url.Values{"draft": {"true"}, "pre-release": {"true"}}, 0, "there is no pre-release draft")
}
func createNewReleaseUsingAPI(t *testing.T, session *TestSession, token string, owner *user_model.User, repo *repo_model.Repository, name, target, title, desc string) *api.Release {
func createNewReleaseUsingAPI(t *testing.T, token string, owner *user_model.User, repo *repo_model.Repository, name, target, title, desc string) *api.Release {
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/releases", owner.Name, repo.Name)
req := NewRequestWithJSON(t, "POST", urlStr, &api.CreateReleaseOption{
TagName: name,
@ -120,7 +120,7 @@ func TestAPICreateAndUpdateRelease(t *testing.T) {
target, err := gitRepo.GetTagCommitID("v0.0.1")
assert.NoError(t, err)
newRelease := createNewReleaseUsingAPI(t, session, token, owner, repo, "v0.0.1", target, "v0.0.1", "test")
newRelease := createNewReleaseUsingAPI(t, token, owner, repo, "v0.0.1", target, "v0.0.1", "test")
urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/releases/%d", owner.Name, repo.Name, newRelease.ID)
req := NewRequest(t, "GET", urlStr).
@ -162,7 +162,7 @@ func TestAPICreateReleaseToDefaultBranch(t *testing.T) {
session := loginUser(t, owner.LowerName)
token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
createNewReleaseUsingAPI(t, session, token, owner, repo, "v0.0.1", "", "v0.0.1", "test")
createNewReleaseUsingAPI(t, token, owner, repo, "v0.0.1", "", "v0.0.1", "test")
}
func TestAPICreateReleaseToDefaultBranchOnExistingTag(t *testing.T) {
@ -180,7 +180,7 @@ func TestAPICreateReleaseToDefaultBranchOnExistingTag(t *testing.T) {
err = gitRepo.CreateTag("v0.0.1", "master")
assert.NoError(t, err)
createNewReleaseUsingAPI(t, session, token, owner, repo, "v0.0.1", "", "v0.0.1", "test")
createNewReleaseUsingAPI(t, token, owner, repo, "v0.0.1", "", "v0.0.1", "test")
}
func TestAPIGetLatestRelease(t *testing.T) {
@ -232,7 +232,7 @@ func TestAPIDeleteReleaseByTagName(t *testing.T) {
session := loginUser(t, owner.LowerName)
token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
createNewReleaseUsingAPI(t, session, token, owner, repo, "release-tag", "", "Release Tag", "test")
createNewReleaseUsingAPI(t, token, owner, repo, "release-tag", "", "Release Tag", "test")
// delete release
req := NewRequestf(t, http.MethodDelete, fmt.Sprintf("/api/v1/repos/%s/%s/releases/tags/release-tag", owner.Name, repo.Name)).
@ -258,7 +258,7 @@ func TestAPIUploadAssetRelease(t *testing.T) {
session := loginUser(t, owner.LowerName)
token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
r := createNewReleaseUsingAPI(t, session, token, owner, repo, "release-tag", "", "Release Tag", "test")
r := createNewReleaseUsingAPI(t, token, owner, repo, "release-tag", "", "Release Tag", "test")
filename := "image.png"
buff := generateImg()

View File

@ -80,7 +80,7 @@ func TestAPIDeleteTagByName(t *testing.T) {
_ = MakeRequest(t, req, http.StatusNoContent)
// Make sure that actual releases can't be deleted outright
createNewReleaseUsingAPI(t, session, token, owner, repo, "release-tag", "", "Release Tag", "test")
createNewReleaseUsingAPI(t, token, owner, repo, "release-tag", "", "Release Tag", "test")
req = NewRequest(t, http.MethodDelete, fmt.Sprintf("/api/v1/repos/%s/%s/tags/release-tag", owner.Name, repo.Name)).
AddTokenAuth(token)

View File

@ -4,7 +4,7 @@
package integration
import (
"math/rand"
"math/rand/v2"
"net/http"
"net/url"
"testing"
@ -18,7 +18,7 @@ import (
func StringWithCharset(length int, charset string) string {
b := make([]byte, length)
for i := range b {
b[i] = charset[rand.Intn(len(charset))]
b[i] = charset[rand.IntN(len(charset))]
}
return string(b)
}
@ -37,7 +37,7 @@ func BenchmarkRepoBranchCommit(b *testing.B) {
b.ResetTimer()
b.Run("CreateBranch", func(b *testing.B) {
b.StopTimer()
branchName := StringWithCharset(5+rand.Intn(10), "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
branchName := StringWithCharset(5+rand.IntN(10), "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
b.StartTimer()
for i := 0; i < b.N; i++ {
b.Run("new_"+branchName, func(b *testing.B) {
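
For context, math/rand/v2 (Go 1.22+) renames Intn to IntN and its top-level functions are automatically seeded, so no rand.Seed boilerplate is needed. A standalone sketch of the new API as used in this benchmark:

package main

import (
	"fmt"
	"math/rand/v2"
)

func main() {
	const charset = "abcdefghijklmnopqrstuvwxyz0123456789"
	b := make([]byte, 8)
	for i := range b {
		b[i] = charset[rand.IntN(len(charset))] // IntN replaces v1's Intn
	}
	fmt.Println(string(b)) // prints a different 8-character string on each run
}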

View File

@ -5,9 +5,9 @@ package integration
import (
"bytes"
"crypto/rand"
"encoding/hex"
"fmt"
"math/rand"
"net/http"
"net/url"
"os"

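The new crypto/rand and encoding/hex imports suggest random test identifiers are now produced from cryptographic bytes rather than math/rand. A minimal sketch of that common pattern (the helper name is illustrative, not the test's actual code):

package main

import (
	"crypto/rand"
	"encoding/hex"
	"fmt"
)

// randomHex returns a random string of 2*n hexadecimal characters.
func randomHex(n int) string {
	buf := make([]byte, n)
	if _, err := rand.Read(buf); err != nil {
		panic(err) // a failing system RNG is not worth recovering from in a test helper
	}
	return hex.EncodeToString(buf)
}

func main() {
	fmt.Println(randomHex(8)) // prints 16 hex characters, different on every run
}
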
View File

@ -19,9 +19,9 @@ import (
"code.gitea.io/gitea/modules/test"
"code.gitea.io/gitea/tests"
"github.com/ProtonMail/go-crypto/openpgp"
"github.com/ProtonMail/go-crypto/openpgp/armor"
"github.com/stretchr/testify/assert"
"golang.org/x/crypto/openpgp"
"golang.org/x/crypto/openpgp/armor"
)
func TestGPGGit(t *testing.T) {
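
golang.org/x/crypto/openpgp is frozen and deprecated upstream; the ProtonMail fork keeps the same package layout and a largely compatible API, which is why only the import paths change in this hunk. A hedged sketch of the drop-in usage (the key file name is hypothetical):

package main

import (
	"bytes"
	"fmt"
	"os"

	"github.com/ProtonMail/go-crypto/openpgp"
)

func main() {
	armored, err := os.ReadFile("testkey.asc") // hypothetical armored public key
	if err != nil {
		fmt.Println("read error:", err)
		return
	}
	// ReadArmoredKeyRing keeps the signature it had in the frozen x/crypto package.
	ring, err := openpgp.ReadArmoredKeyRing(bytes.NewReader(armored))
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println("entities in keyring:", len(ring))
}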

View File

@ -17,7 +17,7 @@ import (
"github.com/stretchr/testify/assert"
)
func resultFilenames(t testing.TB, doc *HTMLDoc) []string {
func resultFilenames(doc *HTMLDoc) []string {
filenameSelections := doc.doc.Find(".repository.search").Find(".repo-search-result").Find(".header").Find("span.file")
result := make([]string, filenameSelections.Length())
filenameSelections.Each(func(i int, selection *goquery.Selection) {
@ -56,6 +56,6 @@ func testSearch(t *testing.T, url string, expected []string) {
req := NewRequest(t, "GET", url)
resp := MakeRequest(t, req, http.StatusOK)
filenames := resultFilenames(t, NewHTMLParser(t, resp.Body))
filenames := resultFilenames(NewHTMLParser(t, resp.Body))
assert.EqualValues(t, expected, filenames)
}

View File

@ -78,7 +78,7 @@ func getDeleteRepoFilesOptions(repo *repo_model.Repository) *files_service.Chang
}
}
func getExpectedFileResponseForRepofilesDelete(u *url.URL) *api.FileResponse {
func getExpectedFileResponseForRepofilesDelete() *api.FileResponse {
// Returns only the fields that don't change, since fields with commit SHAs and dates can't be determined in advance
return &api.FileResponse{
Content: nil,
@ -418,7 +418,7 @@ func testDeleteRepoFiles(t *testing.T, u *url.URL) {
t.Run("Delete README.md file", func(t *testing.T) {
filesResponse, err := files_service.ChangeRepoFiles(git.DefaultContext, repo, doer, opts)
assert.NoError(t, err)
expectedFileResponse := getExpectedFileResponseForRepofilesDelete(u)
expectedFileResponse := getExpectedFileResponseForRepofilesDelete()
assert.NotNil(t, filesResponse)
assert.Nil(t, filesResponse.Files[0])
assert.EqualValues(t, expectedFileResponse.Commit.Message, filesResponse.Commit.Message)
@ -460,7 +460,7 @@ func testDeleteRepoFilesWithoutBranchNames(t *testing.T, u *url.URL) {
t.Run("Delete README.md without Branch Name", func(t *testing.T) {
filesResponse, err := files_service.ChangeRepoFiles(git.DefaultContext, repo, doer, opts)
assert.NoError(t, err)
expectedFileResponse := getExpectedFileResponseForRepofilesDelete(u)
expectedFileResponse := getExpectedFileResponseForRepofilesDelete()
assert.NotNil(t, filesResponse)
assert.Nil(t, filesResponse.Files[0])
assert.EqualValues(t, expectedFileResponse.Commit.Message, filesResponse.Commit.Message)

View File

@ -680,10 +680,6 @@ input:-webkit-autofill:active,
box-shadow: 0 6px 18px var(--color-shadow) !important;
}
.ui.dimmer {
background: var(--color-overlay-backdrop);
}
.ui.dropdown .menu > .header {
font-size: 0.8em;
}

View File

@ -16,6 +16,7 @@
@import "./modules/table.css";
@import "./modules/card.css";
@import "./modules/checkbox.css";
@import "./modules/dimmer.css";
@import "./modules/modal.css";
@import "./modules/select.css";

View File

@ -0,0 +1,30 @@
/* These are the remnants of the fomantic dimmer module */
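/* Only the subset still relied on is kept: a fixed, flex-centered backdrop toggled via the `active` class. */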
.ui.dimmer {
position: fixed;
display: none;
top: 0;
left: 0;
right: 0;
bottom: 0;
background: var(--color-overlay-backdrop);
opacity: 0;
z-index: 1000;
overflow-y: auto;
justify-content: center;
padding: 8px 0;
animation-name: fadein;
animation-duration: .2s;
user-select: none;
}
.ui.active.dimmer {
display: flex;
opacity: 1;
}
.ui.dimmer > * {
position: static;
margin-top: auto !important;
margin-bottom: auto !important;
}

View File

@ -8,363 +8,6 @@
* http://opensource.org/licenses/MIT
*
*/
/*!
* # Fomantic-UI - Dimmer
* http://github.com/fomantic/Fomantic-UI/
*
*
* Released under the MIT license
* http://opensource.org/licenses/MIT
*
*/
/*******************************
Dimmer
*******************************/
.dimmable:not(body) {
position: relative;
}
.ui.dimmer {
display: none;
position: absolute;
top: 0 !important;
left: 0 !important;
width: 100%;
height: 100%;
text-align: center;
vertical-align: middle;
padding: 1em;
background: rgba(0, 0, 0, 0.85);
opacity: 0;
line-height: 1;
animation-fill-mode: both;
animation-duration: 0.5s;
transition: background-color 0.5s linear;
flex-direction: column;
align-items: center;
justify-content: center;
-webkit-user-select: none;
-moz-user-select: none;
user-select: none;
will-change: opacity;
z-index: 1000;
}
/* Dimmer Content */
.ui.dimmer > .content {
-webkit-user-select: text;
-moz-user-select: text;
user-select: text;
color: #FFFFFF;
}
/* Loose Coupling */
.ui.segment > .ui.dimmer:not(.page) {
border-radius: inherit;
}
/* Scrollbars */
/*******************************
States
*******************************/
/* Animating */
.animating.dimmable:not(body),
.dimmed.dimmable:not(body) {
overflow: hidden;
}
/* Animating / Active / Visible */
.dimmed.dimmable > .ui.animating.dimmer,
.dimmed.dimmable > .ui.visible.dimmer,
.ui.active.dimmer {
display: flex;
opacity: 1;
}
/* Disabled */
.ui.disabled.dimmer {
width: 0 !important;
height: 0 !important;
}
/*******************************
Variations
*******************************/
/*--------------
Legacy
---------------*/
/* Animating / Active / Visible */
.dimmed.dimmable > .ui.animating.legacy.dimmer,
.dimmed.dimmable > .ui.visible.legacy.dimmer,
.ui.active.legacy.dimmer {
display: block;
}
/*--------------
Alignment
---------------*/
.ui[class*="top aligned"].dimmer {
justify-content: flex-start;
}
.ui[class*="bottom aligned"].dimmer {
justify-content: flex-end;
}
/*--------------
Page
---------------*/
.ui.page.dimmer {
position: fixed;
transform-style: '';
perspective: 2000px;
transform-origin: center center;
}
.ui.page.dimmer.modals {
-moz-perspective: none;
}
body.animating.in.dimmable,
body.dimmed.dimmable {
overflow: hidden;
}
body.dimmable > .dimmer {
position: fixed;
}
/*--------------
Blurring
---------------*/
.blurring.dimmable > :not(.dimmer) {
filter: initial;
transition: 800ms filter ease;
}
.blurring.dimmed.dimmable > :not(.dimmer):not(.popup) {
filter: blur(5px) grayscale(0.7);
}
/* Dimmer Color */
.blurring.dimmable > .dimmer {
background: rgba(0, 0, 0, 0.6);
}
.blurring.dimmable > .inverted.dimmer {
background: rgba(255, 255, 255, 0.6);
}
/*--------------
Aligned
---------------*/
.ui.dimmer > .top.aligned.content > * {
vertical-align: top;
}
.ui.dimmer > .bottom.aligned.content > * {
vertical-align: bottom;
}
/*--------------
Shades
---------------*/
.medium.medium.medium.medium.medium.dimmer {
background: rgba(0, 0, 0, 0.65);
}
.light.light.light.light.light.dimmer {
background: rgba(0, 0, 0, 0.45);
}
.very.light.light.light.light.dimmer {
background: rgba(0, 0, 0, 0.25);
}
/*--------------
Simple
---------------*/
/* Displays without javascript */
.ui.simple.dimmer {
display: block;
overflow: hidden;
opacity: 0;
width: 0;
height: 0;
z-index: -100;
background: rgba(0, 0, 0, 0);
}
.dimmed.dimmable > .ui.simple.dimmer {
overflow: visible;
opacity: 1;
width: 100%;
height: 100%;
background: rgba(0, 0, 0, 0.85);
z-index: 1;
}
.ui.simple.inverted.dimmer {
background: rgba(255, 255, 255, 0);
}
.dimmed.dimmable > .ui.simple.inverted.dimmer {
background: rgba(255, 255, 255, 0.85);
}
/*--------------
Partially
----------------*/
.ui[class*="top dimmer"],
.ui[class*="center dimmer"],
.ui[class*="bottom dimmer"] {
height: auto;
}
.ui[class*="bottom dimmer"] {
top: auto !important;
bottom: 0;
}
.ui[class*="center dimmer"] {
top: 50% !important;
transform: translateY(-50%);
-webkit-transform: translateY(calc(-50% - 0.5px));
}
.ui.segment > .ui.ui[class*="top dimmer"] {
border-bottom-left-radius: 0;
border-bottom-right-radius: 0;
}
.ui.segment > .ui.ui[class*="center dimmer"] {
border-radius: 0;
}
.ui.segment > .ui.ui[class*="bottom dimmer"] {
border-top-left-radius: 0;
border-top-right-radius: 0;
}
.ui[class*="center dimmer"].transition[class*="fade up"].in {
animation-name: fadeInUpCenter;
}
.ui[class*="center dimmer"].transition[class*="fade down"].in {
animation-name: fadeInDownCenter;
}
.ui[class*="center dimmer"].transition[class*="fade up"].out {
animation-name: fadeOutUpCenter;
}
.ui[class*="center dimmer"].transition[class*="fade down"].out {
animation-name: fadeOutDownCenter;
}
.ui[class*="center dimmer"].bounce.transition {
animation-name: bounceCenter;
}
@keyframes fadeInUpCenter {
0% {
opacity: 0;
transform: translateY(-40%);
-webkit-transform: translateY(calc(-40% - 0.5px));
}
100% {
opacity: 1;
transform: translateY(-50%);
-webkit-transform: translateY(calc(-50% - 0.5px));
}
}
@keyframes fadeInDownCenter {
0% {
opacity: 0;
transform: translateY(-60%);
-webkit-transform: translateY(calc(-60% - 0.5px));
}
100% {
opacity: 1;
transform: translateY(-50%);
-webkit-transform: translateY(calc(-50% - 0.5px));
}
}
@keyframes fadeOutUpCenter {
0% {
opacity: 1;
transform: translateY(-50%);
-webkit-transform: translateY(calc(-50% - 0.5px));
}
100% {
opacity: 0;
transform: translateY(-45%);
-webkit-transform: translateY(calc(-45% - 0.5px));
}
}
@keyframes fadeOutDownCenter {
0% {
opacity: 1;
transform: translateY(-50%);
-webkit-transform: translateY(calc(-50% - 0.5px));
}
100% {
opacity: 0;
transform: translateY(-55%);
-webkit-transform: translateY(calc(-55% - 0.5px));
}
}
@keyframes bounceCenter {
0%, 20%, 50%, 80%, 100% {
transform: translateY(-50%);
-webkit-transform: translateY(calc(-50% - 0.5px));
}
40% {
transform: translateY(calc(-50% - 30px));
}
60% {
transform: translateY(calc(-50% - 15px));
}
}
/*******************************
Theme Overrides
*******************************/
/*******************************
User Overrides
*******************************/
/*!
* # Fomantic-UI - Dropdown
* http://github.com/fomantic/Fomantic-UI/

View File

@ -1184,760 +1184,6 @@ $.api.settings = {
})( jQuery, window, document );
/*!
* # Fomantic-UI - Dimmer
* http://github.com/fomantic/Fomantic-UI/
*
*
* Released under the MIT license
* http://opensource.org/licenses/MIT
*
*/
;(function ($, window, document, undefined) {
'use strict';
$.isFunction = $.isFunction || function(obj) {
return typeof obj === "function" && typeof obj.nodeType !== "number";
};
window = (typeof window != 'undefined' && window.Math == Math)
? window
: (typeof self != 'undefined' && self.Math == Math)
? self
: Function('return this')()
;
$.fn.dimmer = function(parameters) {
var
$allModules = $(this),
time = new Date().getTime(),
performance = [],
query = arguments[0],
methodInvoked = (typeof query == 'string'),
queryArguments = [].slice.call(arguments, 1),
returnedValue
;
$allModules
.each(function() {
var
settings = ( $.isPlainObject(parameters) )
? $.extend(true, {}, $.fn.dimmer.settings, parameters)
: $.extend({}, $.fn.dimmer.settings),
selector = settings.selector,
namespace = settings.namespace,
className = settings.className,
error = settings.error,
eventNamespace = '.' + namespace,
moduleNamespace = 'module-' + namespace,
moduleSelector = $allModules.selector || '',
clickEvent = "click", unstableClickEvent = ('ontouchstart' in document.documentElement)
? 'touchstart'
: 'click',
$module = $(this),
$dimmer,
$dimmable,
element = this,
instance = $module.data(moduleNamespace),
module
;
module = {
preinitialize: function() {
if( module.is.dimmer() ) {
$dimmable = $module.parent();
$dimmer = $module;
}
else {
$dimmable = $module;
if( module.has.dimmer() ) {
if(settings.dimmerName) {
$dimmer = $dimmable.find(selector.dimmer).filter('.' + settings.dimmerName);
}
else {
$dimmer = $dimmable.find(selector.dimmer);
}
}
else {
$dimmer = module.create();
}
}
},
initialize: function() {
module.debug('Initializing dimmer', settings);
module.bind.events();
module.set.dimmable();
module.instantiate();
},
instantiate: function() {
module.verbose('Storing instance of module', module);
instance = module;
$module
.data(moduleNamespace, instance)
;
},
destroy: function() {
module.verbose('Destroying previous module', $dimmer);
module.unbind.events();
module.remove.variation();
$dimmable
.off(eventNamespace)
;
},
bind: {
events: function() {
if(settings.on == 'hover') {
$dimmable
.on('mouseenter' + eventNamespace, module.show)
.on('mouseleave' + eventNamespace, module.hide)
;
}
else if(settings.on == 'click') {
$dimmable
.on(clickEvent + eventNamespace, module.toggle)
;
}
if( module.is.page() ) {
module.debug('Setting as a page dimmer', $dimmable);
module.set.pageDimmer();
}
if( module.is.closable() ) {
module.verbose('Adding dimmer close event', $dimmer);
$dimmable
.on(clickEvent + eventNamespace, selector.dimmer, module.event.click)
;
}
}
},
unbind: {
events: function() {
$module
.removeData(moduleNamespace)
;
$dimmable
.off(eventNamespace)
;
}
},
event: {
click: function(event) {
module.verbose('Determining if event occurred on dimmer', event);
if( $dimmer.find(event.target).length === 0 || $(event.target).is(selector.content) ) {
module.hide();
event.stopImmediatePropagation();
}
}
},
addContent: function(element) {
var
$content = $(element)
;
module.debug('Add content to dimmer', $content);
if($content.parent()[0] !== $dimmer[0]) {
$content.detach().appendTo($dimmer);
}
},
create: function() {
var
$element = $( settings.template.dimmer(settings) )
;
if(settings.dimmerName) {
module.debug('Creating named dimmer', settings.dimmerName);
$element.addClass(settings.dimmerName);
}
$element
.appendTo($dimmable)
;
return $element;
},
show: function(callback) {
callback = $.isFunction(callback)
? callback
: function(){}
;
module.debug('Showing dimmer', $dimmer, settings);
module.set.variation();
if( (!module.is.dimmed() || module.is.animating()) && module.is.enabled() ) {
module.animate.show(callback);
settings.onShow.call(element);
settings.onChange.call(element);
}
else {
module.debug('Dimmer is already shown or disabled');
}
},
hide: function(callback) {
callback = $.isFunction(callback)
? callback
: function(){}
;
if( module.is.dimmed() || module.is.animating() ) {
module.debug('Hiding dimmer', $dimmer);
module.animate.hide(callback);
settings.onHide.call(element);
settings.onChange.call(element);
}
else {
module.debug('Dimmer is not visible');
}
},
toggle: function() {
module.verbose('Toggling dimmer visibility', $dimmer);
if( !module.is.dimmed() ) {
module.show();
}
else {
if ( module.is.closable() ) {
module.hide();
}
}
},
animate: {
show: function(callback) {
callback = $.isFunction(callback)
? callback
: function(){}
;
if(settings.useCSS && $.fn.transition !== undefined && $dimmer.transition('is supported')) {
if(settings.useFlex) {
module.debug('Using flex dimmer');
module.remove.legacy();
}
else {
module.debug('Using legacy non-flex dimmer');
module.set.legacy();
}
if(settings.opacity !== 'auto') {
module.set.opacity();
}
$dimmer
.transition({
displayType : settings.useFlex
? 'flex'
: 'block',
animation : settings.transition + ' in',
queue : false,
duration : module.get.duration(),
useFailSafe : true,
onStart : function() {
module.set.dimmed();
},
onComplete : function() {
module.set.active();
callback();
}
})
;
}
else {
module.verbose('Showing dimmer animation with javascript');
module.set.dimmed();
if(settings.opacity == 'auto') {
settings.opacity = 0.8;
}
$dimmer
.stop()
.css({
opacity : 0,
width : '100%',
height : '100%'
})
.fadeTo(module.get.duration(), settings.opacity, function() {
$dimmer.removeAttr('style');
module.set.active();
callback();
})
;
}
},
hide: function(callback) {
callback = $.isFunction(callback)
? callback
: function(){}
;
if(settings.useCSS && $.fn.transition !== undefined && $dimmer.transition('is supported')) {
module.verbose('Hiding dimmer with css');
$dimmer
.transition({
displayType : settings.useFlex
? 'flex'
: 'block',
animation : settings.transition + ' out',
queue : false,
duration : module.get.duration(),
useFailSafe : true,
onComplete : function() {
module.remove.dimmed();
module.remove.variation();
module.remove.active();
callback();
}
})
;
}
else {
module.verbose('Hiding dimmer with javascript');
$dimmer
.stop()
.fadeOut(module.get.duration(), function() {
module.remove.dimmed();
module.remove.active();
$dimmer.removeAttr('style');
callback();
})
;
}
}
},
get: {
dimmer: function() {
return $dimmer;
},
duration: function() {
if(typeof settings.duration == 'object') {
if( module.is.active() ) {
return settings.duration.hide;
}
else {
return settings.duration.show;
}
}
return settings.duration;
}
},
has: {
dimmer: function() {
if(settings.dimmerName) {
return ($module.find(selector.dimmer).filter('.' + settings.dimmerName).length > 0);
}
else {
return ( $module.find(selector.dimmer).length > 0 );
}
}
},
is: {
active: function() {
return $dimmer.hasClass(className.active);
},
animating: function() {
return ( $dimmer.is(':animated') || $dimmer.hasClass(className.animating) );
},
closable: function() {
if(settings.closable == 'auto') {
if(settings.on == 'hover') {
return false;
}
return true;
}
return settings.closable;
},
dimmer: function() {
return $module.hasClass(className.dimmer);
},
dimmable: function() {
return $module.hasClass(className.dimmable);
},
dimmed: function() {
return $dimmable.hasClass(className.dimmed);
},
disabled: function() {
return $dimmable.hasClass(className.disabled);
},
enabled: function() {
return !module.is.disabled();
},
page: function () {
return $dimmable.is('body');
},
pageDimmer: function() {
return $dimmer.hasClass(className.pageDimmer);
}
},
can: {
show: function() {
return !$dimmer.hasClass(className.disabled);
}
},
set: {
opacity: function(opacity) {
var
color = $dimmer.css('background-color'),
colorArray = color.split(','),
isRGB = (colorArray && colorArray.length >= 3)
;
opacity = settings.opacity === 0 ? 0 : settings.opacity || opacity;
if(isRGB) {
colorArray[2] = colorArray[2].replace(')','');
colorArray[3] = opacity + ')';
color = colorArray.join(',');
}
else {
color = 'rgba(0, 0, 0, ' + opacity + ')';
}
module.debug('Setting opacity to', opacity);
$dimmer.css('background-color', color);
},
legacy: function() {
$dimmer.addClass(className.legacy);
},
active: function() {
$dimmer.addClass(className.active);
},
dimmable: function() {
$dimmable.addClass(className.dimmable);
},
dimmed: function() {
$dimmable.addClass(className.dimmed);
},
pageDimmer: function() {
$dimmer.addClass(className.pageDimmer);
},
disabled: function() {
$dimmer.addClass(className.disabled);
},
variation: function(variation) {
variation = variation || settings.variation;
if(variation) {
$dimmer.addClass(variation);
}
}
},
remove: {
active: function() {
$dimmer
.removeClass(className.active)
;
},
legacy: function() {
$dimmer.removeClass(className.legacy);
},
dimmed: function() {
$dimmable.removeClass(className.dimmed);
},
disabled: function() {
$dimmer.removeClass(className.disabled);
},
variation: function(variation) {
variation = variation || settings.variation;
if(variation) {
$dimmer.removeClass(variation);
}
}
},
setting: function(name, value) {
module.debug('Changing setting', name, value);
if( $.isPlainObject(name) ) {
$.extend(true, settings, name);
}
else if(value !== undefined) {
if($.isPlainObject(settings[name])) {
$.extend(true, settings[name], value);
}
else {
settings[name] = value;
}
}
else {
return settings[name];
}
},
internal: function(name, value) {
if( $.isPlainObject(name) ) {
$.extend(true, module, name);
}
else if(value !== undefined) {
module[name] = value;
}
else {
return module[name];
}
},
debug: function() {
if(!settings.silent && settings.debug) {
if(settings.performance) {
module.performance.log(arguments);
}
else {
module.debug = Function.prototype.bind.call(console.info, console, settings.name + ':');
module.debug.apply(console, arguments);
}
}
},
verbose: function() {
if(!settings.silent && settings.verbose && settings.debug) {
if(settings.performance) {
module.performance.log(arguments);
}
else {
module.verbose = Function.prototype.bind.call(console.info, console, settings.name + ':');
module.verbose.apply(console, arguments);
}
}
},
error: function() {
if(!settings.silent) {
module.error = Function.prototype.bind.call(console.error, console, settings.name + ':');
module.error.apply(console, arguments);
}
},
performance: {
log: function(message) {
var
currentTime,
executionTime,
previousTime
;
if(settings.performance) {
currentTime = new Date().getTime();
previousTime = time || currentTime;
executionTime = currentTime - previousTime;
time = currentTime;
performance.push({
'Name' : message[0],
'Arguments' : [].slice.call(message, 1) || '',
'Element' : element,
'Execution Time' : executionTime
});
}
clearTimeout(module.performance.timer);
module.performance.timer = setTimeout(module.performance.display, 500);
},
display: function() {
var
title = settings.name + ':',
totalTime = 0
;
time = false;
clearTimeout(module.performance.timer);
$.each(performance, function(index, data) {
totalTime += data['Execution Time'];
});
title += ' ' + totalTime + 'ms';
if(moduleSelector) {
title += ' \'' + moduleSelector + '\'';
}
if($allModules.length > 1) {
title += ' ' + '(' + $allModules.length + ')';
}
if( (console.group !== undefined || console.table !== undefined) && performance.length > 0) {
console.groupCollapsed(title);
if(console.table) {
console.table(performance);
}
else {
$.each(performance, function(index, data) {
console.log(data['Name'] + ': ' + data['Execution Time']+'ms');
});
}
console.groupEnd();
}
performance = [];
}
},
invoke: function(query, passedArguments, context) {
var
object = instance,
maxDepth,
found,
response
;
passedArguments = passedArguments || queryArguments;
context = element || context;
if(typeof query == 'string' && object !== undefined) {
query = query.split(/[\. ]/);
maxDepth = query.length - 1;
$.each(query, function(depth, value) {
var camelCaseValue = (depth != maxDepth)
? value + query[depth + 1].charAt(0).toUpperCase() + query[depth + 1].slice(1)
: query
;
if( $.isPlainObject( object[camelCaseValue] ) && (depth != maxDepth) ) {
object = object[camelCaseValue];
}
else if( object[camelCaseValue] !== undefined ) {
found = object[camelCaseValue];
return false;
}
else if( $.isPlainObject( object[value] ) && (depth != maxDepth) ) {
object = object[value];
}
else if( object[value] !== undefined ) {
found = object[value];
return false;
}
else {
module.error(error.method, query);
return false;
}
});
}
if ( $.isFunction( found ) ) {
response = found.apply(context, passedArguments);
}
else if(found !== undefined) {
response = found;
}
if(Array.isArray(returnedValue)) {
returnedValue.push(response);
}
else if(returnedValue !== undefined) {
returnedValue = [returnedValue, response];
}
else if(response !== undefined) {
returnedValue = response;
}
return found;
}
};
module.preinitialize();
if(methodInvoked) {
if(instance === undefined) {
module.initialize();
}
module.invoke(query);
}
else {
if(instance !== undefined) {
instance.invoke('destroy');
}
module.initialize();
}
})
;
return (returnedValue !== undefined)
? returnedValue
: this
;
};
$.fn.dimmer.settings = {
name : 'Dimmer',
namespace : 'dimmer',
silent : false,
debug : false,
verbose : false,
performance : true,
// whether should use flex layout
useFlex : true,
// name to distinguish between multiple dimmers in context
dimmerName : false,
// whether to add a variation type
variation : false,
// whether to bind close events
closable : 'auto',
// whether to use css animations
useCSS : true,
// css animation to use
transition : 'fade',
// event to bind to
on : false,
// overriding opacity value
opacity : 'auto',
// transition durations
duration : {
show : 500,
hide : 500
},
// whether the dynamically created dimmer should have a loader
displayLoader: false,
loaderText : false,
loaderVariation : '',
onChange : function(){},
onShow : function(){},
onHide : function(){},
error : {
method : 'The method you called is not defined.'
},
className : {
active : 'active',
animating : 'animating',
dimmable : 'dimmable',
dimmed : 'dimmed',
dimmer : 'dimmer',
disabled : 'disabled',
hide : 'hide',
legacy : 'legacy',
pageDimmer : 'page',
show : 'show',
loader : 'ui loader'
},
selector: {
dimmer : '> .ui.dimmer',
content : '.ui.dimmer > .content, .ui.dimmer > .content > .center'
},
template: {
dimmer: function(settings) {
var d = $('<div/>').addClass('ui dimmer'),l;
if(settings.displayLoader) {
l = $('<div/>')
.addClass(settings.className.loader)
.addClass(settings.loaderVariation);
if(!!settings.loaderText){
l.text(settings.loaderText);
l.addClass('text');
}
d.append(l);
}
return d;
}
}
};
})( jQuery, window, document );
/*!

View File

@ -22,7 +22,6 @@
"admin": false,
"components": [
"api",
"dimmer",
"dropdown",
"form",
"modal",

View File

@ -1,5 +1,5 @@
export function handleGlobalEnterQuickSubmit(target) {
const form = target.closest('form');
let form = target.closest('form');
if (form) {
if (!form.checkValidity()) {
form.reportValidity();
@ -9,5 +9,10 @@ export function handleGlobalEnterQuickSubmit(target) {
// use the event to trigger the submit here (instead of calling the `submit()` method directly),
// otherwise the `areYouSure` handler won't be executed and an annoying "confirm to leave" dialog will appear
form.dispatchEvent(new SubmitEvent('submit', {bubbles: true, cancelable: true}));
return;
}
form = target.closest('.ui.form');
if (form) {
form.querySelector('.ui.primary.button')?.click();
}
}

View File

@ -162,8 +162,8 @@ async function onEditContent(event) {
editContentZone.innerHTML = document.getElementById('issue-comment-editor-template').innerHTML;
comboMarkdownEditor = await initComboMarkdownEditor(editContentZone.querySelector('.combo-markdown-editor'));
comboMarkdownEditor.attachedDropzoneInst = await setupDropzone(editContentZone.querySelector('.dropzone'));
editContentZone.querySelector('.cancel.button').addEventListener('click', cancelAndReset);
editContentZone.querySelector('.save.button').addEventListener('click', saveAndRefresh);
editContentZone.querySelector('.ui.cancel.button').addEventListener('click', cancelAndReset);
editContentZone.querySelector('.ui.primary.button').addEventListener('click', saveAndRefresh);
}
// Show write/preview tab and copy raw content as needed

View File

@ -5,6 +5,7 @@ import {initAriaFormFieldPatch} from './fomantic/form.js';
import {initAriaDropdownPatch} from './fomantic/dropdown.js';
import {initAriaModalPatch} from './fomantic/modal.js';
import {initFomanticTransition} from './fomantic/transition.js';
import {initFomanticDimmer} from './fomantic/dimmer.js';
import {svg} from '../svg.js';
export const fomanticMobileScreen = window.matchMedia('only screen and (max-width: 767.98px)');
@ -24,6 +25,7 @@ export function initGiteaFomantic() {
};
initFomanticTransition();
initFomanticDimmer();
initFomanticApiPatch();
// Use the patches to improve accessibility, these patches are designed to be as independent as possible, make it easy to modify or remove in the future.

View File

@ -0,0 +1,29 @@
import $ from 'jquery';
import {queryElemChildren} from '../../utils/dom.js';
export function initFomanticDimmer() {
// stand-in for removed dimmer module
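// only the calls still needed are supported: 'add content', 'get dimmer', 'show' and 'hide'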
$.fn.dimmer = function (arg0, $el) {
if (arg0 === 'add content') {
const existingDimmer = document.querySelector('body > .ui.dimmer');
if (existingDimmer) {
queryElemChildren(existingDimmer, '*', (el) => el.remove());
this._dimmer = existingDimmer;
} else {
this._dimmer = document.createElement('div');
this._dimmer.classList.add('ui', 'dimmer');
document.body.append(this._dimmer);
}
this._dimmer.append($el[0]);
} else if (arg0 === 'get dimmer') {
return $(this._dimmer);
} else if (arg0 === 'show') {
this._dimmer.classList.add('active');
document.body.classList.add('tw-overflow-hidden');
} else if (arg0 === 'hide') {
this._dimmer.classList.remove('active');
document.body.classList.remove('tw-overflow-hidden');
}
return this;
};
}