package search

import "github.com/blevesearch/bleve/analysis"

// CommaTokenizer splits its input on commas, emitting one token per
// comma-separated segment.
type CommaTokenizer struct{}

func (ct *CommaTokenizer) Tokenize(sentence []byte) analysis.TokenStream {
	result := make(analysis.TokenStream, 0)
	pos := 1
	lastIdx := 0
	for i, length := 0, len(sentence); i < length; i++ {
		if sentence[i] == ',' {
			token := analysis.Token{
				Term:     sentence[lastIdx:i],
				Start:    lastIdx,
				End:      i, // End is the byte offset just past the term
				Position: pos,
				Type:     analysis.Ideographic,
				KeyWord:  true,
			}
			pos++
			lastIdx = i + 1 // the next term starts after the comma
			result = append(result, &token)
		}
	}
	// Emit the trailing segment after the last comma (or the whole
	// input when it contains no comma at all).
	if lastIdx < len(sentence) {
		result = append(result, &analysis.Token{
			Term:     sentence[lastIdx:],
			Start:    lastIdx,
			End:      len(sentence),
			Position: pos,
			Type:     analysis.Ideographic,
			KeyWord:  true,
		})
	}
	return result
}
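To see the tokenizer in action, a small test in the same package can print the token stream it produces; the file, sample input, and assertion below are illustrative only, not part of the original code.

package search

import (
	"fmt"
	"testing"
)

func TestCommaTokenizer(t *testing.T) {
	// Tokenize a simple comma-separated value and dump each token.
	tokens := (&CommaTokenizer{}).Tokenize([]byte("red,green,blue"))
	for _, token := range tokens {
		fmt.Printf("pos=%d term=%q [%d,%d)\n",
			token.Position, token.Term, token.Start, token.End)
	}
	if len(tokens) != 3 {
		t.Fatalf("expected 3 tokens, got %d", len(tokens))
	}
}

Running go test -v prints one line per segment, for example pos=1 term="red" [0,3). Note that consecutive commas still yield empty terms, which a downstream token filter could drop if that matters for your index.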