gitea/routers/web/user/code.go

// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package user

import (
	"net/http"

	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/modules/base"
	"code.gitea.io/gitea/modules/context"
	code_indexer "code.gitea.io/gitea/modules/indexer/code"
	"code.gitea.io/gitea/modules/setting"
)

const (
	tplUserCode base.TplName = "user/code"
)

// CodeSearch renders the user/organization code search page
func CodeSearch(ctx *context.Context) {
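	// Code search is only possible when the repository indexer is enabled;
	// otherwise send the visitor back to the user's profile page.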
	if !setting.Indexer.RepoIndexerEnabled {
		ctx.Redirect(ctx.ContextUser.HomeLink())
		return
	}

	ctx.Data["IsProjectEnabled"] = true
	ctx.Data["IsPackageEnabled"] = setting.Packages.Enabled
	ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
	ctx.Data["Title"] = ctx.Tr("explore.code")
	ctx.Data["ContextUser"] = ctx.ContextUser

	language := ctx.FormTrim("l")
	keyword := ctx.FormTrim("q")

	queryType := ctx.FormTrim("t")
	isMatch := queryType == "match"

	ctx.Data["Keyword"] = keyword
	ctx.Data["Language"] = language
	ctx.Data["queryType"] = queryType
	ctx.Data["IsCodePage"] = true

	if keyword == "" {
		ctx.HTML(http.StatusOK, tplUserCode)
		return
	}

	var (
		repoIDs []int64
		err     error
	)

	page := ctx.FormInt("page")
	if page <= 0 {
		page = 1
	}

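	// Limit the search to repositories owned by the context user/organization
	// that the current viewer (ctx.Doer) is allowed to access.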
	repoIDs, err = repo_model.FindUserCodeAccessibleOwnerRepoIDs(ctx, ctx.ContextUser.ID, ctx.Doer)
	if err != nil {
		ctx.ServerError("FindUserCodeAccessibleOwnerRepoIDs", err)
		return
	}

	var (
		total                 int
		searchResults         []*code_indexer.Result
		searchResultLanguages []*code_indexer.SearchResultLanguages
	)

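	// Only query the code indexer when there is at least one accessible repository.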
	if len(repoIDs) > 0 {
		total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(ctx, repoIDs, language, keyword, page, setting.UI.RepoSearchPagingNum, isMatch)
		if err != nil {
			if code_indexer.IsAvailable(ctx) {
				ctx.ServerError("SearchResults", err)
				return
			}
			ctx.Data["CodeIndexerUnavailable"] = true
		} else {
ctx.Data["CodeIndexerUnavailable"] = !code_indexer.IsAvailable(ctx)
}
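		// Collect the distinct repository IDs referenced by the results so the
		// repository metadata can be loaded with a single query.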
		loadRepoIDs := make([]int64, 0, len(searchResults))
		for _, result := range searchResults {
			var find bool
			for _, id := range loadRepoIDs {
				if id == result.RepoID {
					find = true
					break
				}
			}
			if !find {
				loadRepoIDs = append(loadRepoIDs, result.RepoID)
			}
		}

		repoMaps, err := repo_model.GetRepositoriesMapByIDs(loadRepoIDs)
		if err != nil {
			ctx.ServerError("GetRepositoriesMapByIDs", err)
			return
		}

		ctx.Data["RepoMaps"] = repoMaps
	}

	ctx.Data["SearchResults"] = searchResults
	ctx.Data["SearchResultLanguages"] = searchResultLanguages

	pager := context.NewPagination(total, setting.UI.RepoSearchPagingNum, page, 5)
	pager.SetDefaultParams(ctx)
	pager.AddParam(ctx, "l", "Language")
	ctx.Data["Page"] = pager

	ctx.HTML(http.StatusOK, tplUserCode)
}