Remove unique filter from repo indexer analyzer. (#7878)
* Remove unique filter from repo indexer analyzer.
* Bump repoIndexerLatestVersion to 4
* Correct fmt
* make vendor to remove unique dependency
parent 85202d4784
commit 5661773018
@@ -13,7 +13,6 @@ import (
 	"github.com/blevesearch/bleve"
 	"github.com/blevesearch/bleve/analysis/analyzer/custom"
 	"github.com/blevesearch/bleve/analysis/token/lowercase"
-	"github.com/blevesearch/bleve/analysis/token/unique"
 	"github.com/blevesearch/bleve/analysis/tokenizer/unicode"
 	"github.com/blevesearch/bleve/search/query"
 	"github.com/ethantkoenig/rupture"
@@ -23,7 +22,7 @@ const (
 	repoIndexerAnalyzer = "repoIndexerAnalyzer"
 	repoIndexerDocType  = "repoIndexerDocType"

-	repoIndexerLatestVersion = 3
+	repoIndexerLatestVersion = 4
 )

 // repoIndexer (thread-safe) index for repository contents
@@ -110,7 +109,7 @@ func createRepoIndexer(path string, latestVersion int) error {
 		"type":          custom.Name,
 		"char_filters":  []string{},
 		"tokenizer":     unicode.Name,
-		"token_filters": []string{unicodeNormalizeName, lowercase.Name, unique.Name},
+		"token_filters": []string{unicodeNormalizeName, lowercase.Name},
 	}); err != nil {
 		return err
 	}
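For context, the analyzer touched in the last hunk is registered through bleve's custom-analyzer API. The standalone sketch below shows the post-change filter chain (unicode tokenizer, then unicode normalization and lowercasing, with the unique filter gone). The unicodeNormalizeName value, the NFC form, and the surrounding main function are assumptions made for illustration, not code from this commit.

package main

import (
	"log"

	"github.com/blevesearch/bleve"
	"github.com/blevesearch/bleve/analysis/analyzer/custom"
	"github.com/blevesearch/bleve/analysis/token/lowercase"
	"github.com/blevesearch/bleve/analysis/token/unicodenorm"
	"github.com/blevesearch/bleve/analysis/tokenizer/unicode"
)

const (
	repoIndexerAnalyzer  = "repoIndexerAnalyzer" // name taken from the diff above
	unicodeNormalizeName = "unicodeNormalize"    // assumed name for the custom normalization filter
)

func main() {
	indexMapping := bleve.NewIndexMapping()

	// Register a unicode-normalization token filter under the custom name;
	// the NFC form is an assumption for this sketch.
	if err := indexMapping.AddCustomTokenFilter(unicodeNormalizeName, map[string]interface{}{
		"type": unicodenorm.Name,
		"form": unicodenorm.NFC,
	}); err != nil {
		log.Fatal(err)
	}

	// Register the analyzer with the post-change filter chain:
	// normalization and lowercasing, without unique.Name.
	if err := indexMapping.AddCustomAnalyzer(repoIndexerAnalyzer, map[string]interface{}{
		"type":          custom.Name,
		"char_filters":  []string{},
		"tokenizer":     unicode.Name,
		"token_filters": []string{unicodeNormalizeName, lowercase.Name},
	}); err != nil {
		log.Fatal(err)
	}

	// bleve.New(path, indexMapping) would then build an index using this analyzer.
	indexMapping.DefaultAnalyzer = repoIndexerAnalyzer
}

Bumping repoIndexerLatestVersion in the second hunk presumably forces existing indexes to be rebuilt with the new analyzer, since dropping the unique filter changes how repeated terms within a document are indexed.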
vendor/github.com/blevesearch/bleve/analysis/token/unique/unique.go (generated, vendored): 53 lines removed
@ -1,53 +0,0 @@
|
|||
// Copyright (c) 2018 Couchbase, Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package unique
|
||||
|
||||
import (
|
||||
"github.com/blevesearch/bleve/analysis"
|
||||
"github.com/blevesearch/bleve/registry"
|
||||
)
|
||||
|
||||
const Name = "unique"
|
||||
|
||||
// UniqueTermFilter retains only the tokens which mark the first occurrence of
|
||||
// a term. Tokens whose term appears in a preceding token are dropped.
|
||||
type UniqueTermFilter struct{}
|
||||
|
||||
func NewUniqueTermFilter() *UniqueTermFilter {
|
||||
return &UniqueTermFilter{}
|
||||
}
|
||||
|
||||
func (f *UniqueTermFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
|
||||
encounteredTerms := make(map[string]struct{}, len(input)/4)
|
||||
j := 0
|
||||
for _, token := range input {
|
||||
term := string(token.Term)
|
||||
if _, ok := encounteredTerms[term]; ok {
|
||||
continue
|
||||
}
|
||||
encounteredTerms[term] = struct{}{}
|
||||
input[j] = token
|
||||
j++
|
||||
}
|
||||
return input[:j]
|
||||
}
|
||||
|
||||
func UniqueTermFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) {
|
||||
return NewUniqueTermFilter(), nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
registry.RegisterTokenFilter(Name, UniqueTermFilterConstructor)
|
||||
}
|
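For reference, the deleted filter simply deduplicates a token stream: only the first token carrying a given term survives. A minimal usage sketch against the package as it still exists upstream in bleve (the sample tokens are invented for illustration):

package main

import (
	"fmt"

	"github.com/blevesearch/bleve/analysis"
	"github.com/blevesearch/bleve/analysis/token/unique"
)

func main() {
	// Three tokens, one term repeated.
	input := analysis.TokenStream{
		&analysis.Token{Term: []byte("readme")},
		&analysis.Token{Term: []byte("license")},
		&analysis.Token{Term: []byte("readme")},
	}

	// The filter keeps only the first occurrence of each term,
	// compacting the stream in place.
	out := unique.NewUniqueTermFilter().Filter(input)

	for _, tok := range out {
		fmt.Println(string(tok.Term)) // prints "readme", then "license"
	}
}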
vendor/modules.txt (vendored): 1 line removed
@@ -26,7 +26,6 @@ github.com/blevesearch/bleve
 github.com/blevesearch/bleve/analysis/analyzer/custom
 github.com/blevesearch/bleve/analysis/token/lowercase
 github.com/blevesearch/bleve/analysis/token/unicodenorm
-github.com/blevesearch/bleve/analysis/token/unique
 github.com/blevesearch/bleve/analysis/tokenizer/unicode
 github.com/blevesearch/bleve/index/upsidedown
 github.com/blevesearch/bleve/mapping