package ai

import (
	"context"
	"github.com/milvus-io/milvus-sdk-go/v2/entity"
	"github.com/tmc/langchaingo/embeddings"

	"gitee.com/dn-jinmin/chatcms/internal/logic/ai/base"
	"gitee.com/dn-jinmin/chatcms/internal/svc"
	"github.com/milvus-io/milvus-sdk-go/v2/client"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/vectorstores"
	"github.com/tmc/langchaingo/vectorstores/milvus"
)

// Reference is an AI tool that answers a user's question by retrieving
// matching scripts (话术) from a Milvus vector store via a retrieval-QA chain.
type Reference struct {
	*base.Base

	// store is the Milvus-backed vector store, built lazily on the first
	// Call invocation (nil until then).
	store vectorstores.VectorStore

	// svc provides the embedder, default LLM, and Milvus configuration.
	svc *svc.ServiceContext
}

// NewReference creates a Reference tool bound to the given service context.
// The vector store is not connected here; it is initialized lazily by Call.
func NewReference(svc *svc.ServiceContext) *Reference {
	r := &Reference{
		svc: svc,
	}
	return r
}

// Name returns the tool's identifier as registered with the agent.
func (r *Reference) Name() string {
	const toolName = "reference"
	return toolName
}

// Description returns the natural-language description the agent uses to
// decide when to invoke this tool.
func (r *Reference) Description() string {
	const desc = `根据用户的问题获取相匹配的话术，返回相匹配的话术`
	return desc
}

// Call answers input by retrieving the single best-matching document from the
// Milvus vector store and running a retrieval-QA chain over it with the
// service's default LLM. It returns the chain's answer text.
//
// The vector store is constructed lazily on first use.
// NOTE(review): this lazy initialization is not goroutine-safe; if the tool
// can be invoked concurrently, guard r.store with sync.Once or a mutex.
func (r *Reference) Call(ctx context.Context, input string) (string, error) {
	if r.store == nil {
		// IVF_FLAT index with L2 distance, nlist=1024 — TODO confirm this
		// matches the collection's actual index parameters.
		idx, err := entity.NewIndexIvfFlat(entity.L2, 1024)
		if err != nil {
			return "", err
		}
		embedder, err := embeddings.NewEmbedder(r.svc.Embedder)
		if err != nil {
			return "", err
		}

		store, err := milvus.New(ctx, client.Config{
			Address: r.svc.Config.Milvus.Uri,
		},
			milvus.WithEmbedder(embedder),
			milvus.WithIndex(idx))
		if err != nil {
			return "", err
		}
		r.store = store
	}

	var opts []chains.ChainCallOption
	// Use the comma-ok assertion: the original bare `.(string)` panics when
	// the context carries no InputModel value (or a non-string value).
	if model, ok := ctx.Value(base.InputModel).(string); ok && model != "" {
		opts = append(opts, chains.WithModel(model))
	}

	qa := chains.NewRetrievalQAFromLLM(r.svc.DefaultLLM, vectorstores.ToRetriever(r.store, 1))
	return chains.Predict(ctx, qa, map[string]any{
		"query": input,
	}, opts...)
}
