Merge branch 'main' into lunny/issue_dev
Commit fcc2c577dc
@@ -171,7 +171,10 @@ func (a *Action) TableIndices() []*schemas.Index {
 	cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType)
 	cudIndex.AddColumn("created_unix", "user_id", "is_deleted")
 
-	indices := []*schemas.Index{actUserIndex, repoIndex, cudIndex}
+	cuIndex := schemas.NewIndex("c_u", schemas.IndexType)
+	cuIndex.AddColumn("user_id", "is_deleted")
+
+	indices := []*schemas.Index{actUserIndex, repoIndex, cudIndex, cuIndex}
 
 	return indices
 }
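Note: the new `c_u` index is declared through xorm's `TableIndices` hook rather than a struct tag, and `Sync` creates any index that is missing. A minimal, self-contained sketch of that mechanism (the `action` struct below is illustrative only, not the real model):

```go
package main

import (
	"fmt"

	"xorm.io/xorm/schemas"
)

// Illustrative stand-in for the real Action model.
type action struct {
	UserID    int64
	IsDeleted bool
}

// TableIndices lets a model declare composite indexes; engine.Sync compares the
// returned list with the database and creates any index that does not exist yet.
func (*action) TableIndices() []*schemas.Index {
	cuIndex := schemas.NewIndex("c_u", schemas.IndexType)
	cuIndex.AddColumn("user_id", "is_deleted")
	return []*schemas.Index{cuIndex}
}

func main() {
	for _, idx := range (&action{}).TableIndices() {
		fmt.Println(idx.Name, idx.Cols)
	}
}
```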
@@ -365,7 +365,8 @@ func prepareMigrationTasks() []*migration {
 		newMigration(305, "Add Repository Licenses", v1_23.AddRepositoryLicenses),
 		newMigration(306, "Add BlockAdminMergeOverride to ProtectedBranch", v1_23.AddBlockAdminMergeOverrideBranchProtection),
 		newMigration(307, "Fix milestone deadline_unix when there is no due date", v1_23.FixMilestoneNoDueDate),
-		newMigration(308, "Add table issue_dev_link", v1_23.CreateTableIssueDevLink),
+		newMigration(308, "Add index(user_id, is_deleted) for action table", v1_23.AddNewIndexForUserDashboard),
+		newMigration(309, "Add table issue_dev_link", v1_23.CreateTableIssueDevLink),
 	}
 	return preparedMigrations
 }
@@ -7,16 +7,46 @@ import (
 	"code.gitea.io/gitea/modules/timeutil"
 
 	"xorm.io/xorm"
+	"xorm.io/xorm/schemas"
 )
 
-func CreateTableIssueDevLink(x *xorm.Engine) error {
-	type IssueDevLink struct {
-		ID           int64              `xorm:"pk autoincr"`
-		IssueID      int64              `xorm:"INDEX"`
-		LinkType     int
-		LinkedRepoID int64              `xorm:"INDEX"` // it can link to self repo or other repo
-		LinkIndex    string             // branch name, pull request number or commit sha
-		CreatedUnix  timeutil.TimeStamp `xorm:"INDEX created"`
-	}
-	return x.Sync(new(IssueDevLink))
+type improveActionTableIndicesAction struct {
+	ID          int64 `xorm:"pk autoincr"`
+	UserID      int64 `xorm:"INDEX"` // Receiver user id.
+	OpType      int
+	ActUserID   int64 // Action user id.
+	RepoID      int64
+	CommentID   int64 `xorm:"INDEX"`
+	IsDeleted   bool  `xorm:"NOT NULL DEFAULT false"`
+	RefName     string
+	IsPrivate   bool               `xorm:"NOT NULL DEFAULT false"`
+	Content     string             `xorm:"TEXT"`
+	CreatedUnix timeutil.TimeStamp `xorm:"created"`
+}
+
+// TableName sets the name of this table
+func (*improveActionTableIndicesAction) TableName() string {
+	return "action"
+}
+
+func (a *improveActionTableIndicesAction) TableIndices() []*schemas.Index {
+	repoIndex := schemas.NewIndex("r_u_d", schemas.IndexType)
+	repoIndex.AddColumn("repo_id", "user_id", "is_deleted")
+
+	actUserIndex := schemas.NewIndex("au_r_c_u_d", schemas.IndexType)
+	actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted")
+
+	cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType)
+	cudIndex.AddColumn("created_unix", "user_id", "is_deleted")
+
+	cuIndex := schemas.NewIndex("c_u", schemas.IndexType)
+	cuIndex.AddColumn("user_id", "is_deleted")
+
+	indices := []*schemas.Index{actUserIndex, repoIndex, cudIndex, cuIndex}
+
+	return indices
+}
+
+func AddNewIndexForUserDashboard(x *xorm.Engine) error {
+	return x.Sync(new(improveActionTableIndicesAction))
 }
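For context: the migration does not import the real models package; it declares a throwaway struct carrying only the columns and indexes it cares about, and maps it onto the existing `action` table by overriding `TableName`. A hedged sketch of that naming trick, showing what xorm's default snake-case mapper would otherwise derive:

```go
package main

import (
	"fmt"

	"xorm.io/xorm/names"
)

// Migration-local struct; only the fields needed for the new index matter.
type improveActionTableIndicesAction struct {
	ID int64 `xorm:"pk autoincr"`
}

// Without this override, xorm would map the struct to a table derived from its
// type name instead of the existing "action" table.
func (*improveActionTableIndicesAction) TableName() string { return "action" }

func main() {
	derived := names.SnakeMapper{}.Obj2Table("ImproveActionTableIndicesAction")
	fmt.Println("default mapping:", derived) // improve_action_table_indices_action
	fmt.Println("override:", (&improveActionTableIndicesAction{}).TableName())
}
```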
models/migrations/v1_23/v309.go — new file (22 lines)
@@ -0,0 +1,22 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_23 //nolint
+
+import (
+	"code.gitea.io/gitea/modules/timeutil"
+
+	"xorm.io/xorm"
+)
+
+func CreateTableIssueDevLink(x *xorm.Engine) error {
+	type IssueDevLink struct {
+		ID           int64              `xorm:"pk autoincr"`
+		IssueID      int64              `xorm:"INDEX"`
+		LinkType     int
+		LinkedRepoID int64              `xorm:"INDEX"` // it can link to self repo or other repo
+		LinkIndex    string             // branch name, pull request number or commit sha
+		CreatedUnix  timeutil.TimeStamp `xorm:"INDEX created"`
+	}
+	return x.Sync(new(IssueDevLink))
+}
@@ -31,6 +31,7 @@ import (
 	"github.com/blevesearch/bleve/v2/analysis/token/camelcase"
 	"github.com/blevesearch/bleve/v2/analysis/token/lowercase"
 	"github.com/blevesearch/bleve/v2/analysis/token/unicodenorm"
+	"github.com/blevesearch/bleve/v2/analysis/tokenizer/letter"
 	"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
 	"github.com/blevesearch/bleve/v2/mapping"
 	"github.com/blevesearch/bleve/v2/search/query"
@@ -69,7 +70,7 @@ const (
 	filenameIndexerAnalyzer  = "filenameIndexerAnalyzer"
 	filenameIndexerTokenizer = "filenameIndexerTokenizer"
 	repoIndexerDocType       = "repoIndexerDocType"
-	repoIndexerLatestVersion = 7
+	repoIndexerLatestVersion = 8
 )
 
 // generateBleveIndexMapping generates a bleve index mapping for the repo indexer
@@ -105,7 +106,7 @@ func generateBleveIndexMapping() (mapping.IndexMapping, error) {
 	} else if err := mapping.AddCustomAnalyzer(repoIndexerAnalyzer, map[string]any{
 		"type":          analyzer_custom.Name,
 		"char_filters":  []string{},
-		"tokenizer":     unicode.Name,
+		"tokenizer":     letter.Name,
 		"token_filters": []string{unicodeNormalizeName, camelcase.Name, lowercase.Name},
 	}); err != nil {
 		return nil, err
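Why the tokenizer swap bumps `repoIndexerLatestVersion`: the unicode tokenizer segments on word boundaries and keeps digits attached to letters, while the letter tokenizer splits on every non-letter rune, so previously indexed content must be re-indexed. A rough, hedged illustration (the `FieldsFunc` call only approximates the letter tokenizer; it is not the bleve implementation):

```go
package main

import (
	"fmt"
	"strings"
	"unicode"

	unicode_tokenizer "github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
)

func main() {
	input := "example-file.js repo1"

	// Word-boundary segmentation: "repo1" stays a single token.
	for _, tok := range unicode_tokenizer.NewUnicodeTokenizer().Tokenize([]byte(input)) {
		fmt.Printf("unicode token: %s\n", tok.Term)
	}

	// Letter-class splitting (approximation): digits and punctuation separate tokens,
	// so "repo1" becomes "repo" and "example-file.js" becomes "example", "file", "js".
	letterish := strings.FieldsFunc(input, func(r rune) bool { return !unicode.IsLetter(r) })
	fmt.Println("letter-like tokens:", letterish)
}
```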
@@ -30,7 +30,7 @@ import (
 )
 
 const (
-	esRepoIndexerLatestVersion = 2
+	esRepoIndexerLatestVersion = 3
 	// multi-match-types, currently only 2 types are used
 	// Reference: https://www.elastic.co/guide/en/elasticsearch/reference/7.0/query-dsl-multi-match-query.html#multi-match-types
 	esMultiMatchTypeBestFields = "best_fields"
@@ -60,6 +60,10 @@ const (
 		"settings": {
 			"analysis": {
 				"analyzer": {
+					"content_analyzer": {
+						"tokenizer": "content_tokenizer",
+						"filter" : ["lowercase"]
+					},
 					"filename_path_analyzer": {
 						"tokenizer": "path_tokenizer"
 					},
@@ -68,6 +72,10 @@ const (
 					}
 				},
 				"tokenizer": {
+					"content_tokenizer": {
+						"type": "simple_pattern_split",
+						"pattern": "[^a-zA-Z0-9]"
+					},
 					"path_tokenizer": {
 						"type": "path_hierarchy",
 						"delimiter": "/"
@@ -104,7 +112,8 @@ const (
 			"content": {
 				"type": "text",
 				"term_vector": "with_positions_offsets",
-				"index": true
+				"index": true,
+				"analyzer": "content_analyzer"
 			},
 			"commit_id": {
 				"type": "keyword",
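The new `content_analyzer` pairs a `simple_pattern_split` tokenizer (splitting on `[^a-zA-Z0-9]`) with a `lowercase` filter, which is what makes the case-insensitive and `console.log`-style searches in the tests below match. A rough Go illustration of the resulting terms (this only mimics the Elasticsearch analyzer, it does not call it):

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// Same pattern as the new content_tokenizer, followed by the lowercase filter.
	nonAlnum := regexp.MustCompile(`[^a-zA-Z0-9]+`)

	var terms []string
	for _, t := range nonAlnum.Split(`console.log("Hello, World!")`, -1) {
		if t != "" {
			terms = append(terms, strings.ToLower(t))
		}
	}
	fmt.Println(terms) // [console log hello world]
}
```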
@@ -181,6 +181,55 @@ func testIndexer(name string, t *testing.T, indexer internal.Indexer) {
 			},
 		},
 		},
+		// Search for matches on the contents of files regardless of case.
+		{
+			RepoIDs: nil,
+			Keyword: "dESCRIPTION",
+			Langs:   1,
+			Results: []codeSearchResult{
+				{
+					Filename: "README.md",
+					Content:  "# repo1\n\nDescription for repo1",
+				},
+			},
+		},
+		// Search for an exact match on the filename within the repo '62' (case insenstive).
+		// This scenario yields a single result (the file avocado.md on the repo '62')
+		{
+			RepoIDs: []int64{62},
+			Keyword: "AVOCADO.MD",
+			Langs:   1,
+			Results: []codeSearchResult{
+				{
+					Filename: "avocado.md",
+					Content:  "# repo1\n\npineaple pie of cucumber juice",
+				},
+			},
+		},
+		// Search for matches on the contents of files when the criteria is a expression.
+		{
+			RepoIDs: []int64{62},
+			Keyword: "console.log",
+			Langs:   1,
+			Results: []codeSearchResult{
+				{
+					Filename: "example-file.js",
+					Content:  "console.log(\"Hello, World!\")",
+				},
+			},
+		},
+		// Search for matches on the contents of files when the criteria is part of a expression.
+		{
+			RepoIDs: []int64{62},
+			Keyword: "log",
+			Langs:   1,
+			Results: []codeSearchResult{
+				{
+					Filename: "example-file.js",
+					Content:  "console.log(\"Hello, World!\")",
+				},
+			},
+		},
 	}
 
 	for _, kw := range keywords {
@@ -6,12 +6,13 @@ package bleve
 import (
 	"errors"
 	"os"
+	"unicode"
 
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/util"
 
 	"github.com/blevesearch/bleve/v2"
-	"github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
+	unicode_tokenizer "github.com/blevesearch/bleve/v2/analysis/tokenizer/unicode"
 	"github.com/blevesearch/bleve/v2/index/upsidedown"
 	"github.com/ethantkoenig/rupture"
 )
@@ -57,7 +58,7 @@ func openIndexer(path string, latestVersion int) (bleve.Index, int, error) {
 // may be different on two string and they still be considered equivalent.
 // Given a phrasse, its shortest word determines its fuzziness. If a phrase uses CJK (eg: `갃갃갃` `啊啊啊`), the fuzziness is zero.
 func GuessFuzzinessByKeyword(s string) int {
-	tokenizer := unicode.NewUnicodeTokenizer()
+	tokenizer := unicode_tokenizer.NewUnicodeTokenizer()
 	tokens := tokenizer.Tokenize([]byte(s))
 
 	if len(tokens) > 0 {
@@ -77,8 +78,10 @@ func guessFuzzinessByKeyword(s string) int {
 	// according to https://github.com/blevesearch/bleve/issues/1563, the supported max fuzziness is 2
 	// magic number 4 was chosen to determine the levenshtein distance per each character of a keyword
 	// BUT, when using CJK (eg: `갃갃갃` `啊啊啊`), it mismatches a lot.
+	// Likewise, queries whose terms contains characters that are *not* letters should not use fuzziness
+
 	for _, r := range s {
-		if r >= 128 {
+		if r >= 128 || !unicode.IsLetter(r) {
 			return 0
 		}
 	}
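The extra `!unicode.IsLetter(r)` guard means any keyword containing digits or punctuation (for example `repo1` or `avocado.md`) is searched with fuzziness 0, which is the exact-term behaviour the new test cases below expect. A small standalone sketch of just that guard:

```go
package main

import (
	"fmt"
	"unicode"
)

// Mirrors the loop's early-exit condition: any non-ASCII or non-letter rune
// forces the guessed fuzziness to zero.
func fuzzinessForcedToZero(keyword string) bool {
	for _, r := range keyword {
		if r >= 128 || !unicode.IsLetter(r) {
			return true
		}
	}
	return false
}

func main() {
	for _, kw := range []string{"repo", "repo1", "avocado.md", "갃갃갃"} {
		fmt.Printf("%q -> fuzziness forced to zero: %v\n", kw, fuzzinessForcedToZero(kw))
	}
}
```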
@@ -35,6 +35,14 @@ func TestBleveGuessFuzzinessByKeyword(t *testing.T) {
 			Input:     "갃갃갃",
 			Fuzziness: 0,
 		},
+		{
+			Input:     "repo1",
+			Fuzziness: 0,
+		},
+		{
+			Input:     "avocado.md",
+			Fuzziness: 0,
+		},
 	}
 
 	for _, scenario := range scenarios {
@@ -12,7 +12,7 @@ import (
 	"code.gitea.io/gitea/modules/setting"
 	api "code.gitea.io/gitea/modules/structs"
 	"code.gitea.io/gitea/modules/web"
-	"code.gitea.io/gitea/services/attachment"
+	attachment_service "code.gitea.io/gitea/services/attachment"
 	"code.gitea.io/gitea/services/context"
 	"code.gitea.io/gitea/services/context/upload"
 	"code.gitea.io/gitea/services/convert"
@@ -181,7 +181,7 @@ func CreateIssueAttachment(ctx *context.APIContext) {
 		filename = query
 	}
 
-	attachment, err := attachment.UploadAttachment(ctx, file, setting.Attachment.AllowedTypes, header.Size, &repo_model.Attachment{
+	attachment, err := attachment_service.UploadAttachment(ctx, file, setting.Attachment.AllowedTypes, header.Size, &repo_model.Attachment{
 		Name:       filename,
 		UploaderID: ctx.Doer.ID,
 		RepoID:     ctx.Repo.Repository.ID,
@@ -247,6 +247,8 @@ func EditIssueAttachment(ctx *context.APIContext) {
 	//     "$ref": "#/responses/Attachment"
 	//   "404":
 	//     "$ref": "#/responses/error"
+	//   "422":
+	//     "$ref": "#/responses/validationError"
 	//   "423":
 	//     "$ref": "#/responses/repoArchivedError"
 
@@ -261,8 +263,13 @@ func EditIssueAttachment(ctx *context.APIContext) {
 		attachment.Name = form.Name
 	}
 
-	if err := repo_model.UpdateAttachment(ctx, attachment); err != nil {
+	if err := attachment_service.UpdateAttachment(ctx, setting.Attachment.AllowedTypes, attachment); err != nil {
+		if upload.IsErrFileTypeForbidden(err) {
+			ctx.Error(http.StatusUnprocessableEntity, "", err)
+			return
+		}
 		ctx.Error(http.StatusInternalServerError, "UpdateAttachment", err)
+		return
 	}
 
 	ctx.JSON(http.StatusCreated, convert.ToAPIAttachment(ctx.Repo.Repository, attachment))
@@ -14,7 +14,7 @@ import (
 	"code.gitea.io/gitea/modules/setting"
 	api "code.gitea.io/gitea/modules/structs"
 	"code.gitea.io/gitea/modules/web"
-	"code.gitea.io/gitea/services/attachment"
+	attachment_service "code.gitea.io/gitea/services/attachment"
 	"code.gitea.io/gitea/services/context"
 	"code.gitea.io/gitea/services/context/upload"
 	"code.gitea.io/gitea/services/convert"
@@ -189,7 +189,7 @@ func CreateIssueCommentAttachment(ctx *context.APIContext) {
 		filename = query
 	}
 
-	attachment, err := attachment.UploadAttachment(ctx, file, setting.Attachment.AllowedTypes, header.Size, &repo_model.Attachment{
+	attachment, err := attachment_service.UploadAttachment(ctx, file, setting.Attachment.AllowedTypes, header.Size, &repo_model.Attachment{
 		Name:       filename,
 		UploaderID: ctx.Doer.ID,
 		RepoID:     ctx.Repo.Repository.ID,
@@ -263,6 +263,8 @@ func EditIssueCommentAttachment(ctx *context.APIContext) {
 	//     "$ref": "#/responses/Attachment"
 	//   "404":
 	//     "$ref": "#/responses/error"
+	//   "422":
+	//     "$ref": "#/responses/validationError"
 	//   "423":
 	//     "$ref": "#/responses/repoArchivedError"
 	attach := getIssueCommentAttachmentSafeWrite(ctx)
@@ -275,8 +277,13 @@ func EditIssueCommentAttachment(ctx *context.APIContext) {
 		attach.Name = form.Name
 	}
 
-	if err := repo_model.UpdateAttachment(ctx, attach); err != nil {
+	if err := attachment_service.UpdateAttachment(ctx, setting.Attachment.AllowedTypes, attach); err != nil {
+		if upload.IsErrFileTypeForbidden(err) {
+			ctx.Error(http.StatusUnprocessableEntity, "", err)
+			return
+		}
 		ctx.Error(http.StatusInternalServerError, "UpdateAttachment", attach)
+		return
 	}
 	ctx.JSON(http.StatusCreated, convert.ToAPIAttachment(ctx.Repo.Repository, attach))
 }
@@ -13,7 +13,7 @@ import (
 	"code.gitea.io/gitea/modules/setting"
 	api "code.gitea.io/gitea/modules/structs"
 	"code.gitea.io/gitea/modules/web"
-	"code.gitea.io/gitea/services/attachment"
+	attachment_service "code.gitea.io/gitea/services/attachment"
 	"code.gitea.io/gitea/services/context"
 	"code.gitea.io/gitea/services/context/upload"
 	"code.gitea.io/gitea/services/convert"
@@ -234,7 +234,7 @@ func CreateReleaseAttachment(ctx *context.APIContext) {
 	}
 
 	// Create a new attachment and save the file
-	attach, err := attachment.UploadAttachment(ctx, content, setting.Repository.Release.AllowedTypes, size, &repo_model.Attachment{
+	attach, err := attachment_service.UploadAttachment(ctx, content, setting.Repository.Release.AllowedTypes, size, &repo_model.Attachment{
 		Name:       filename,
 		UploaderID: ctx.Doer.ID,
 		RepoID:     ctx.Repo.Repository.ID,
@@ -291,6 +291,8 @@ func EditReleaseAttachment(ctx *context.APIContext) {
 	// responses:
 	//   "201":
 	//     "$ref": "#/responses/Attachment"
+	//   "422":
+	//     "$ref": "#/responses/validationError"
 	//   "404":
 	//     "$ref": "#/responses/notFound"
 
@@ -322,8 +324,13 @@ func EditReleaseAttachment(ctx *context.APIContext) {
 		attach.Name = form.Name
 	}
 
-	if err := repo_model.UpdateAttachment(ctx, attach); err != nil {
+	if err := attachment_service.UpdateAttachment(ctx, setting.Repository.Release.AllowedTypes, attach); err != nil {
+		if upload.IsErrFileTypeForbidden(err) {
+			ctx.Error(http.StatusUnprocessableEntity, "", err)
+			return
+		}
 		ctx.Error(http.StatusInternalServerError, "UpdateAttachment", attach)
+		return
 	}
 	ctx.JSON(http.StatusCreated, convert.ToAPIAttachment(ctx.Repo.Repository, attach))
 }
@@ -50,3 +50,12 @@ func UploadAttachment(ctx context.Context, file io.Reader, allowedTypes string,
 
 	return NewAttachment(ctx, attach, io.MultiReader(bytes.NewReader(buf), file), fileSize)
 }
+
+// UpdateAttachment updates an attachment, verifying that its name is among the allowed types.
+func UpdateAttachment(ctx context.Context, allowedTypes string, attach *repo_model.Attachment) error {
+	if err := upload.Verify(nil, attach.Name, allowedTypes); err != nil {
+		return err
+	}
+
+	return repo_model.UpdateAttachment(ctx, attach)
+}
@@ -28,12 +28,13 @@ func IsErrFileTypeForbidden(err error) bool {
 }
 
 func (err ErrFileTypeForbidden) Error() string {
-	return "This file extension or type is not allowed to be uploaded."
+	return "This file cannot be uploaded or modified due to a forbidden file extension or type."
 }
 
 var wildcardTypeRe = regexp.MustCompile(`^[a-z]+/\*$`)
 
-// Verify validates whether a file is allowed to be uploaded.
+// Verify validates whether a file is allowed to be uploaded. If buf is empty, it will just check if the file
+// has an allowed file extension.
 func Verify(buf []byte, fileName, allowedTypesStr string) error {
 	allowedTypesStr = strings.ReplaceAll(allowedTypesStr, "|", ",") // compat for old config format
 
@@ -56,21 +57,31 @@ func Verify(buf []byte, fileName, allowedTypesStr string) error {
 		return ErrFileTypeForbidden{Type: fullMimeType}
 	}
 	extension := strings.ToLower(path.Ext(fileName))
+	isBufEmpty := len(buf) <= 1
 
 	// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/file#Unique_file_type_specifiers
 	for _, allowEntry := range allowedTypes {
 		if allowEntry == "*/*" {
 			return nil // everything allowed
-		} else if strings.HasPrefix(allowEntry, ".") && allowEntry == extension {
+		}
+		if strings.HasPrefix(allowEntry, ".") && allowEntry == extension {
 			return nil // extension is allowed
-		} else if mimeType == allowEntry {
+		}
+		if isBufEmpty {
+			continue // skip mime type checks if buffer is empty
+		}
+		if mimeType == allowEntry {
 			return nil // mime type is allowed
-		} else if wildcardTypeRe.MatchString(allowEntry) && strings.HasPrefix(mimeType, allowEntry[:len(allowEntry)-1]) {
+		}
+		if wildcardTypeRe.MatchString(allowEntry) && strings.HasPrefix(mimeType, allowEntry[:len(allowEntry)-1]) {
 			return nil // wildcard match, e.g. image/*
 		}
 	}
 
-	log.Info("Attachment with type %s blocked from upload", fullMimeType)
+	if !isBufEmpty {
+		log.Info("Attachment with type %s blocked from upload", fullMimeType)
+	}
+
 	return ErrFileTypeForbidden{Type: fullMimeType}
 }
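With these changes, `Verify` can be called with a nil buffer (as the new `UpdateAttachment` service does when an attachment is merely renamed): MIME sniffing is skipped and only the extension entries or `*/*` can allow the name. A simplified, hedged sketch of that extension-only path (not the full function):

```go
package main

import (
	"fmt"
	"path"
	"strings"
)

// Extension-only check, roughly what Verify falls back to when no file content
// is available: a "*/*" wildcard or a matching ".ext" entry allows the name,
// everything else is rejected.
func allowedByExtension(fileName, allowedTypesStr string) bool {
	allowedTypesStr = strings.ReplaceAll(allowedTypesStr, "|", ",") // compat for old config format
	ext := strings.ToLower(path.Ext(fileName))
	for _, entry := range strings.Split(allowedTypesStr, ",") {
		entry = strings.ToLower(strings.TrimSpace(entry))
		if entry == "*/*" || (strings.HasPrefix(entry, ".") && entry == ext) {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(allowedByExtension("hello_world.txt", ".txt,.png")) // true
	fmt.Println(allowedByExtension("file.bad", ".txt,.png"))        // false
}
```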
templates/swagger/v1_json.tmpl — generated (9 lines changed)
@@ -7706,6 +7706,9 @@
           "404": {
             "$ref": "#/responses/error"
           },
+          "422": {
+            "$ref": "#/responses/validationError"
+          },
           "423": {
             "$ref": "#/responses/repoArchivedError"
           }
@@ -8328,6 +8331,9 @@
           "404": {
             "$ref": "#/responses/error"
           },
+          "422": {
+            "$ref": "#/responses/validationError"
+          },
           "423": {
             "$ref": "#/responses/repoArchivedError"
           }
@@ -13474,6 +13480,9 @@
           },
           "404": {
             "$ref": "#/responses/notFound"
+          },
+          "422": {
+            "$ref": "#/responses/validationError"
           }
         }
       }
@@ -4,5 +4,6 @@ This repository will be used to test code search. The snippet below shows its di
 ├── avocado.md
 ├── cucumber.md
 ├── ham.md
-└── potato
-    └── ham.md
+├── potato
+│   └── ham.md
+└── example-file.js
@@ -3,7 +3,7 @@
 65f1bf27bc3bf70f64657658635e66094edbcb4d refs/heads/develop
 65f1bf27bc3bf70f64657658635e66094edbcb4d refs/heads/feature/1
 78fb907e3a3309eae4fe8fef030874cebbf1cd5e refs/heads/home-md-img-check
-3731fe53b763859aaf83e703ee731f6b9447ff1e refs/heads/master
+9f894b61946fd2f7b8b9d8e370e4d62f915522f5 refs/heads/master
 62fb502a7172d4453f0322a2cc85bddffa57f07a refs/heads/pr-to-update
 4649299398e4d39a5c09eb4f534df6f1e1eb87cc refs/heads/sub-home-md-img-check
 3fa2f829675543ecfc16b2891aebe8bf0608a8f4 refs/notes/commits
Binary file not shown.
@@ -1,2 +1,2 @@
-P pack-393dc29256bc27cb2ec73898507df710be7a3cf5.pack
+P pack-a7bef76cf6e2b46bc816936ab69306fb10aea571.pack
 
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -4,7 +4,7 @@
 65f1bf27bc3bf70f64657658635e66094edbcb4d refs/heads/develop
 65f1bf27bc3bf70f64657658635e66094edbcb4d refs/heads/feature/1
 78fb907e3a3309eae4fe8fef030874cebbf1cd5e refs/heads/home-md-img-check
-3731fe53b763859aaf83e703ee731f6b9447ff1e refs/heads/master
+9f894b61946fd2f7b8b9d8e370e4d62f915522f5 refs/heads/master
 62fb502a7172d4453f0322a2cc85bddffa57f07a refs/heads/pr-to-update
 4649299398e4d39a5c09eb4f534df6f1e1eb87cc refs/heads/sub-home-md-img-check
 3fa2f829675543ecfc16b2891aebe8bf0608a8f4 refs/notes/commits
@@ -151,7 +151,7 @@ func TestAPICreateCommentAttachmentWithUnallowedFile(t *testing.T) {
 func TestAPIEditCommentAttachment(t *testing.T) {
 	defer tests.PrepareTestEnv(t)()
 
-	const newAttachmentName = "newAttachmentName"
+	const newAttachmentName = "newAttachmentName.txt"
 
 	attachment := unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: 6})
 	comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: attachment.CommentID})
@@ -173,6 +173,27 @@ func TestAPIEditCommentAttachment(t *testing.T) {
 	unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: apiAttachment.ID, CommentID: comment.ID, Name: apiAttachment.Name})
 }
 
+func TestAPIEditCommentAttachmentWithUnallowedFile(t *testing.T) {
+	defer tests.PrepareTestEnv(t)()
+
+	attachment := unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: 6})
+	comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: attachment.CommentID})
+	issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID})
+	repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: issue.RepoID})
+	repoOwner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
+	session := loginUser(t, repoOwner.Name)
+	token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteIssue)
+
+	filename := "file.bad"
+	urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/comments/%d/assets/%d",
+		repoOwner.Name, repo.Name, comment.ID, attachment.ID)
+	req := NewRequestWithValues(t, "PATCH", urlStr, map[string]string{
+		"name": filename,
+	}).AddTokenAuth(token)
+
+	session.MakeRequest(t, req, http.StatusUnprocessableEntity)
+}
+
 func TestAPIDeleteCommentAttachment(t *testing.T) {
 	defer tests.PrepareTestEnv(t)()
 
@@ -126,7 +126,7 @@ func TestAPICreateIssueAttachmentWithUnallowedFile(t *testing.T) {
 func TestAPIEditIssueAttachment(t *testing.T) {
 	defer tests.PrepareTestEnv(t)()
 
-	const newAttachmentName = "newAttachmentName"
+	const newAttachmentName = "hello_world.txt"
 
 	attachment := unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: 1})
 	repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: attachment.RepoID})
@@ -147,6 +147,26 @@ func TestAPIEditIssueAttachment(t *testing.T) {
 	unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: apiAttachment.ID, IssueID: issue.ID, Name: apiAttachment.Name})
 }
 
+func TestAPIEditIssueAttachmentWithUnallowedFile(t *testing.T) {
+	defer tests.PrepareTestEnv(t)()
+
+	attachment := unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: 1})
+	repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: attachment.RepoID})
+	issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: attachment.IssueID})
+	repoOwner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
+
+	session := loginUser(t, repoOwner.Name)
+	token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteIssue)
+
+	filename := "file.bad"
+	urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/issues/%d/assets/%d", repoOwner.Name, repo.Name, issue.Index, attachment.ID)
+	req := NewRequestWithValues(t, "PATCH", urlStr, map[string]string{
+		"name": filename,
+	}).AddTokenAuth(token)
+
+	session.MakeRequest(t, req, http.StatusUnprocessableEntity)
+}
+
 func TestAPIDeleteIssueAttachment(t *testing.T) {
 	defer tests.PrepareTestEnv(t)()
 
tests/integration/api_releases_attachment_test.go — new file (40 lines)
@@ -0,0 +1,40 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package integration
+
+import (
+	"fmt"
+	"net/http"
+	"testing"
+
+	auth_model "code.gitea.io/gitea/models/auth"
+	repo_model "code.gitea.io/gitea/models/repo"
+	"code.gitea.io/gitea/models/unittest"
+	user_model "code.gitea.io/gitea/models/user"
+	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/test"
+	"code.gitea.io/gitea/tests"
+)
+
+func TestAPIEditReleaseAttachmentWithUnallowedFile(t *testing.T) {
+	// Limit the allowed release types (since by default there is no restriction)
+	defer test.MockVariableValue(&setting.Repository.Release.AllowedTypes, ".exe")()
+	defer tests.PrepareTestEnv(t)()
+
+	attachment := unittest.AssertExistsAndLoadBean(t, &repo_model.Attachment{ID: 9})
+	release := unittest.AssertExistsAndLoadBean(t, &repo_model.Release{ID: attachment.ReleaseID})
+	repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: attachment.RepoID})
+	repoOwner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
+
+	session := loginUser(t, repoOwner.Name)
+	token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
+
+	filename := "file.bad"
+	urlStr := fmt.Sprintf("/api/v1/repos/%s/%s/releases/%d/assets/%d", repoOwner.Name, repo.Name, release.ID, attachment.ID)
+	req := NewRequestWithValues(t, "PATCH", urlStr, map[string]string{
+		"name": filename,
+	}).AddTokenAuth(token)
+
+	session.MakeRequest(t, req, http.StatusUnprocessableEntity)
+}
@@ -1,3 +1,5 @@
+import {querySingleVisibleElem} from '../../utils/dom.ts';
+
 export function handleGlobalEnterQuickSubmit(target) {
   let form = target.closest('form');
   if (form) {
@@ -12,7 +14,11 @@ export function handleGlobalEnterQuickSubmit(target) {
   }
   form = target.closest('.ui.form');
   if (form) {
-    form.querySelector('.ui.primary.button')?.click();
+    // A form should only have at most one "primary" button to do quick-submit.
+    // Here we don't use a special class to mark the primary button,
+    // because there could be a lot of forms with a primary button, the quick submit should work out-of-box,
+    // but not keeps asking developers to add that special class again and again (it could be forgotten easily)
+    querySingleVisibleElem<HTMLButtonElement>(form, '.ui.primary.button')?.click();
     return true;
   }
   return false;
@@ -3,7 +3,7 @@ import {handleReply} from './repo-issue.ts';
 import {getComboMarkdownEditor, initComboMarkdownEditor, ComboMarkdownEditor} from './comp/ComboMarkdownEditor.ts';
 import {POST} from '../modules/fetch.ts';
 import {showErrorToast} from '../modules/toast.ts';
-import {hideElem, showElem} from '../utils/dom.ts';
+import {hideElem, querySingleVisibleElem, showElem} from '../utils/dom.ts';
 import {attachRefIssueContextPopup} from './contextpopup.ts';
 import {initCommentContent, initMarkupContent} from '../markup/content.ts';
 import {triggerUploadStateChanged} from './comp/EditorUpload.ts';
@@ -77,20 +77,22 @@ async function onEditContent(event) {
     }
   };
 
-  comboMarkdownEditor = getComboMarkdownEditor(editContentZone.querySelector('.combo-markdown-editor'));
-  if (!comboMarkdownEditor) {
-    editContentZone.innerHTML = document.querySelector('#issue-comment-editor-template').innerHTML;
-    const saveButton = editContentZone.querySelector('.ui.primary.button');
-    comboMarkdownEditor = await initComboMarkdownEditor(editContentZone.querySelector('.combo-markdown-editor'));
-    const syncUiState = () => saveButton.disabled = comboMarkdownEditor.isUploading();
-    comboMarkdownEditor.container.addEventListener(ComboMarkdownEditor.EventUploadStateChanged, syncUiState);
-    editContentZone.querySelector('.ui.cancel.button').addEventListener('click', cancelAndReset);
-    saveButton.addEventListener('click', saveAndRefresh);
-  }
-
   // Show write/preview tab and copy raw content as needed
   showElem(editContentZone);
   hideElem(renderContent);
+
+  comboMarkdownEditor = getComboMarkdownEditor(editContentZone.querySelector('.combo-markdown-editor'));
+  if (!comboMarkdownEditor) {
+    editContentZone.innerHTML = document.querySelector('#issue-comment-editor-template').innerHTML;
+    const saveButton = querySingleVisibleElem<HTMLButtonElement>(editContentZone, '.ui.primary.button');
+    const cancelButton = querySingleVisibleElem<HTMLButtonElement>(editContentZone, '.ui.cancel.button');
+    comboMarkdownEditor = await initComboMarkdownEditor(editContentZone.querySelector('.combo-markdown-editor'));
+    const syncUiState = () => saveButton.disabled = comboMarkdownEditor.isUploading();
+    comboMarkdownEditor.container.addEventListener(ComboMarkdownEditor.EventUploadStateChanged, syncUiState);
+    cancelButton.addEventListener('click', cancelAndReset);
+    saveButton.addEventListener('click', saveAndRefresh);
+  }
+
   // FIXME: ideally here should reload content and attachment list from backend for existing editor, to avoid losing data
   if (!comboMarkdownEditor.value()) {
     comboMarkdownEditor.value(rawContent.textContent);
|
|||||||
import {createElementFromAttrs, createElementFromHTML} from './dom.ts';
|
import {createElementFromAttrs, createElementFromHTML, querySingleVisibleElem} from './dom.ts';
|
||||||
|
|
||||||
test('createElementFromHTML', () => {
|
test('createElementFromHTML', () => {
|
||||||
expect(createElementFromHTML('<a>foo<span>bar</span></a>').outerHTML).toEqual('<a>foo<span>bar</span></a>');
|
expect(createElementFromHTML('<a>foo<span>bar</span></a>').outerHTML).toEqual('<a>foo<span>bar</span></a>');
|
||||||
@ -16,3 +16,12 @@ test('createElementFromAttrs', () => {
|
|||||||
}, 'txt', createElementFromHTML('<span>inner</span>'));
|
}, 'txt', createElementFromHTML('<span>inner</span>'));
|
||||||
expect(el.outerHTML).toEqual('<button id="the-id" class="cls-1 cls-2" disabled="" tabindex="0" data-foo="the-data">txt<span>inner</span></button>');
|
expect(el.outerHTML).toEqual('<button id="the-id" class="cls-1 cls-2" disabled="" tabindex="0" data-foo="the-data">txt<span>inner</span></button>');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('querySingleVisibleElem', () => {
|
||||||
|
let el = createElementFromHTML('<div><span>foo</span></div>');
|
||||||
|
expect(querySingleVisibleElem(el, 'span').textContent).toEqual('foo');
|
||||||
|
el = createElementFromHTML('<div><span style="display: none;">foo</span><span>bar</span></div>');
|
||||||
|
expect(querySingleVisibleElem(el, 'span').textContent).toEqual('bar');
|
||||||
|
el = createElementFromHTML('<div><span>foo</span><span>bar</span></div>');
|
||||||
|
expect(() => querySingleVisibleElem(el, 'span')).toThrowError('Expected exactly one visible element');
|
||||||
|
});
|
||||||
|
@@ -269,8 +269,8 @@ export function initSubmitEventPolyfill() {
  */
 export function isElemVisible(element: HTMLElement): boolean {
   if (!element) return false;
-
-  return Boolean(element.offsetWidth || element.offsetHeight || element.getClientRects().length);
+  // checking element.style.display is not necessary for browsers, but it is required by some tests with happy-dom because happy-dom doesn't really do layout
+  return Boolean((element.offsetWidth || element.offsetHeight || element.getClientRects().length) && element.style.display !== 'none');
 }
 
 // replace selected text in a textarea while preserving editor history, e.g. CTRL-Z works after this
@@ -330,3 +330,10 @@ export function animateOnce(el: Element, animationClassName: string): Promise<vo
     el.classList.add(animationClassName);
   });
 }
+
+export function querySingleVisibleElem<T extends HTMLElement>(parent: Element, selector: string): T | null {
+  const elems = parent.querySelectorAll<HTMLElement>(selector);
+  const candidates = Array.from(elems).filter(isElemVisible);
+  if (candidates.length > 1) throw new Error(`Expected exactly one visible element matching selector "${selector}", but found ${candidates.length}`);
+  return candidates.length ? candidates[0] as T : null;
+}