package blevejiebaplugin

import (
	"errors"

	"github.com/blevesearch/bleve/v2/analysis"
	"github.com/blevesearch/bleve/v2/registry"
	"github.com/wangbin/jiebago"
)

// Chinese word-segmentation (jieba) support for Bleve.

// JiebaAnalyzer is an empty placeholder type. Nothing in this file
// instantiates it — the analyzer is assembled by analyzerConstructor.
// NOTE(review): looks like dead code; confirm no external callers
// reference it before removing.
type JiebaAnalyzer struct {
}

// analyzerConstructor builds a bleve analyzer from plugin config.
// The config must contain a "tokenizer" entry (a string) naming a
// tokenizer already registered in the cache; the resulting analyzer
// wraps that tokenizer with no char filters or token filters.
func analyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) {
	name, ok := config["tokenizer"].(string)
	if !ok {
		return nil, errors.New("must specify tokenizer")
	}
	tok, err := cache.TokenizerNamed(name)
	if err != nil {
		return nil, err
	}
	return &analysis.Analyzer{Tokenizer: tok}, nil
}

// JiebaTokenizer adapts a jiebago.Segmenter to bleve's tokenizer
// interface (see the Tokenize method below).
type JiebaTokenizer struct {
	handle jiebago.Segmenter // populated by NewJiebaTokenizer via LoadDictionary
}

// NewJiebaTokenizer returns a JiebaTokenizer whose segmenter is loaded
// from the dictionary file at dictpath.
//
// NOTE(review): LoadDictionary's error is deliberately discarded to keep
// this constructor's signature backward-compatible; if the dictionary
// cannot be read, the returned tokenizer operates on an empty segmenter
// and will produce degraded segmentation. Callers who need the error
// should validate dictpath themselves.
func NewJiebaTokenizer(dictpath string) *JiebaTokenizer {
	var seg jiebago.Segmenter
	// Error intentionally ignored — see the note above.
	_ = seg.LoadDictionary(dictpath)
	return &JiebaTokenizer{handle: seg}
}

// Tokenize segments sentence with jieba (search mode off) and returns a
// bleve TokenStream. Start/End are byte offsets into sentence and
// Position is 1-based, per bleve convention; offsets are accumulated by
// segment length, which assumes Cut yields contiguous segments covering
// the whole input — presumably jiebago guarantees this, TODO confirm.
// Every token is tagged analysis.Ideographic, even non-CJK segments.
func (x *JiebaTokenizer) Tokenize(sentence []byte) analysis.TokenStream {
	stream := make(analysis.TokenStream, 0)
	offset, position := 0, 1
	for seg := range x.handle.Cut(string(sentence), false) {
		end := offset + len(seg)
		stream = append(stream, &analysis.Token{
			Term:     []byte(seg),
			Start:    offset,
			End:      end,
			Position: position,
			Type:     analysis.Ideographic,
		})
		offset = end
		position++
	}
	return stream
}

// tokenizerConstructor builds a JiebaTokenizer from plugin config.
// The config must contain a "dict" entry (a string) giving the path to
// the jieba dictionary file.
func tokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) {
	dictpath, ok := config["dict"].(string)
	if !ok {
		// The config key is "dict"; the old message claimed "dictpath",
		// which sent users looking for the wrong key.
		return nil, errors.New("must specify dict")
	}
	return NewJiebaTokenizer(dictpath), nil
}

// init registers the analyzer and tokenizer constructors with bleve's
// global registry under the name "jieba", making them addressable by
// name from index mappings.
// NOTE(review): any duplicate-name failure reported by these Register*
// calls is ignored here — confirm the behavior (error return vs. panic)
// of the bleve version pinned in go.mod.
func init() {
	registry.RegisterAnalyzer("jieba", analyzerConstructor)
	registry.RegisterTokenizer("jieba", tokenizerConstructor)
}
