// Copyright 2018 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package issues

import (
	"fmt"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/util"
)

// IndexerData is the data stored in the issue indexer
type IndexerData struct {
	ID       int64
	RepoID   int64
	Title    string
	Content  string
	Comments []string
	IsDelete bool
	IDs      []int64
}

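// An IndexerData value is used in two ways by the functions below: as an
// index/update request carrying the issue fields, or, when IsDelete is set,
// as a deletion request carrying only the affected issue IDs. Sketch (the
// literal values are illustrative only):
//
//	// index/update request, see UpdateIssueIndexer
//	_ = issueIndexerQueue.Push(&IndexerData{ID: 1, RepoID: 2, Title: "t", Content: "c"})
//
//	// deletion request, see DeleteRepoIssueIndexer
//	_ = issueIndexerQueue.Push(&IndexerData{IDs: []int64{1, 2, 3}, IsDelete: true})
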
// Match represents one search result
type Match struct {
	ID     int64   `json:"id"`
	RepoID int64   `json:"repo_id"`
	Score  float64 `json:"score"`
}

// SearchResult represents search results
type SearchResult struct {
	Total int64
	Hits  []Match
}

// Indexer defines an interface to index and search issue contents
type Indexer interface {
	Init() (bool, error)
	Index(issues []*IndexerData) error
	Delete(ids ...int64) error
	Search(kw string, repoID int64, limit, start int) (*SearchResult, error)
}

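// Illustrative only, not part of Gitea: a minimal type satisfying the Indexer
// interface above would look roughly like the hypothetical sketch below. The
// real implementations referenced in this file are the bleve and DB indexers.
//
//	type noopIndexer struct{}
//
//	func (n *noopIndexer) Init() (bool, error)               { return true, nil }
//	func (n *noopIndexer) Index(issues []*IndexerData) error { return nil }
//	func (n *noopIndexer) Delete(ids ...int64) error         { return nil }
//	func (n *noopIndexer) Search(kw string, repoID int64, limit, start int) (*SearchResult, error) {
//		return &SearchResult{Total: 0, Hits: []Match{}}, nil
//	}
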
var (
	// issueIndexerQueue is the queue of IndexerData waiting to be indexed or deleted
	issueIndexerQueue Queue
	issueIndexer      Indexer
)

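// Whichever concrete Queue is chosen below, this file relies on only two of
// its methods: Run, started once from InitIssueIndexer, and Push, used by the
// Update*/Delete* helpers to enqueue IndexerData.
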
// InitIssueIndexer initializes the issue indexer; if syncReindex is true it
// blocks until all issues have been reindexed.
func InitIssueIndexer(syncReindex bool) error {
	var populate bool
	var dummyQueue bool
	switch setting.Indexer.IssueType {
	case "bleve":
		issueIndexer = NewBleveIndexer(setting.Indexer.IssuePath)
		exist, err := issueIndexer.Init()
		if err != nil {
			return err
		}
		populate = !exist
	case "db":
		issueIndexer = &DBIndexer{}
		dummyQueue = true
	default:
		return fmt.Errorf("unknown issue indexer type: %s", setting.Indexer.IssueType)
	}

	if dummyQueue {
		issueIndexerQueue = &DummyQueue{}
		return nil
	}

	var err error
	switch setting.Indexer.IssueQueueType {
	case setting.LevelQueueType:
		issueIndexerQueue, err = NewLevelQueue(
			issueIndexer,
			setting.Indexer.IssueQueueDir,
			setting.Indexer.IssueQueueBatchNumber)
		if err != nil {
			return err
		}
	case setting.ChannelQueueType:
		issueIndexerQueue = NewChannelQueue(issueIndexer, setting.Indexer.IssueQueueBatchNumber)
	case setting.RedisQueueType:
		addrs, pass, idx, err := parseConnStr(setting.Indexer.IssueQueueConnStr)
		if err != nil {
			return err
		}
		issueIndexerQueue, err = NewRedisQueue(addrs, pass, idx, issueIndexer, setting.Indexer.IssueQueueBatchNumber)
		if err != nil {
			return err
		}
	default:
		return fmt.Errorf("unsupported indexer queue type: %v", setting.Indexer.IssueQueueType)
	}

	go func() {
		if err := issueIndexerQueue.Run(); err != nil {
			log.Error("issueIndexerQueue.Run: %v", err)
		}
	}()

	if populate {
		if syncReindex {
			populateIssueIndexer()
		} else {
			go populateIssueIndexer()
		}
	}

	return nil
}

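// For reference, the settings consumed above come from the [indexer] section
// of app.ini. A hedged sketch; the key names and values are assumptions taken
// from Gitea's sample configuration, not something defined in this file:
//
//	[indexer]
//	ISSUE_INDEXER_TYPE = bleve               ; or "db" to reuse the database
//	ISSUE_INDEXER_PATH = indexers/issues.bleve
//	ISSUE_INDEXER_QUEUE_TYPE = levelqueue    ; or "channel" / "redis"
//	ISSUE_INDEXER_QUEUE_DIR = indexers/issues.queue
//	ISSUE_INDEXER_QUEUE_BATCH_NUMBER = 20
//	ISSUE_INDEXER_QUEUE_CONN_STR = addrs=127.0.0.1:6379 db=0 ; redis only
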
// populateIssueIndexer populates the issue indexer with existing issue data
func populateIssueIndexer() {
	for page := 1; ; page++ {
		repos, _, err := models.SearchRepositoryByName(&models.SearchRepoOptions{
			Page:        page,
			PageSize:    models.RepositoryListDefaultPageSize,
			OrderBy:     models.SearchOrderByID,
			Private:     true,
			Collaborate: util.OptionalBoolFalse,
		})
		if err != nil {
			log.Error("SearchRepositoryByName: %v", err)
			continue
		}
		if len(repos) == 0 {
			return
		}

		for _, repo := range repos {
			UpdateRepoIndexer(repo)
		}
	}
}

// UpdateRepoIndexer adds/updates all issues of the given repository in the issue indexer
func UpdateRepoIndexer(repo *models.Repository) {
	is, err := models.Issues(&models.IssuesOptions{
		RepoIDs:  []int64{repo.ID},
		IsClosed: util.OptionalBoolNone,
		IsPull:   util.OptionalBoolNone,
	})
	if err != nil {
		log.Error("Issues: %v", err)
		return
	}
	if err = models.IssueList(is).LoadDiscussComments(); err != nil {
		log.Error("LoadDiscussComments: %v", err)
		return
	}
	for _, issue := range is {
		UpdateIssueIndexer(issue)
	}
}

// UpdateIssueIndexer adds/updates an issue in the issue indexer
func UpdateIssueIndexer(issue *models.Issue) {
	var comments []string
	for _, comment := range issue.Comments {
		if comment.Type == models.CommentTypeComment {
			comments = append(comments, comment.Content)
		}
	}
	_ = issueIndexerQueue.Push(&IndexerData{
		ID:       issue.ID,
		RepoID:   issue.RepoID,
		Title:    issue.Title,
		Content:  issue.Content,
		Comments: comments,
	})
}

// DeleteRepoIssueIndexer deletes all of a repository's issues from the issue indexer
func DeleteRepoIssueIndexer(repo *models.Repository) {
	ids, err := models.GetIssueIDsByRepoID(repo.ID)
	if err != nil {
		log.Error("GetIssueIDsByRepoID failed: %v", err)
		return
	}

	if len(ids) == 0 {
		return
	}

	_ = issueIndexerQueue.Push(&IndexerData{
		IDs:      ids,
		IsDelete: true,
	})
}

// SearchIssuesByKeyword searches issue IDs by keyword within one repository
func SearchIssuesByKeyword(repoID int64, keyword string) ([]int64, error) {
	var issueIDs []int64
	res, err := issueIndexer.Search(keyword, repoID, 1000, 0)
	if err != nil {
		return nil, err
	}
	for _, r := range res.Hits {
		issueIDs = append(issueIDs, r.ID)
	}
	return issueIDs, nil
}
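
// A hedged usage sketch from a calling package; the import alias, repo
// variable, and how the IDs are consumed are assumptions, not defined here:
//
//	issueIDs, err := issue_indexer.SearchIssuesByKeyword(repo.ID, "crash on startup")
//	if err != nil {
//		return err
//	}
//	// use issueIDs to restrict the subsequent database query to matching issues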