/**
 * @author: dn-jinmin/dn-jinmin
 * @doc: Default (fallback) handler for the toolx package — wraps a
 *       general-purpose LLM chain that answers arbitrary questions when
 *       no more specific tool matches the user's request.
 */

package toolx

import (
	"aiworkc/internal/domain"
	"aiworkc/internal/svc"
	"aiworkc/pkg/langchain"
	"context"
	"fmt"
	"github.com/tmc/langchaingo/callbacks"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/prompts"
)

const (
	// OUT_PROMPT_TEMPLATE is appended to a handler's own prompt to force the
	// model to reply in a fixed JSON envelope of the form
	// {"chatType": ..., "data": ...}. The {{.chatType}} and {{.data}}
	// placeholders are pre-filled via the prompt's partial variables (see
	// NewDefaultHandler).
	// NOTE(review): the ALL_CAPS name violates Go MixedCaps convention, but
	// the constant is exported and may be referenced outside this file, so
	// renaming is deferred.
	OUT_PROMPT_TEMPLATE = `<< instructions >>
- Your response should follow the JSON format.
- Your response should have the following structure: {"chatType": {{.chatType}}, "data": {{.data}} }
- "chatType" this is a fixed output`
)

// Empty is the default/fallback chat handler: when no more specific tool is
// selected, it forwards the user's question to a general-purpose LLM chain.
type Empty struct {
	c        chains.Chain      // LLM chain built in NewDefaultHandler; executed by Call.
	callback callbacks.Handler // NOTE(review): never assigned or read in this file — possibly dead; confirm against the rest of the package before removal.
}

// NewDefaultHandler builds the fallback handler backed by an LLM chain.
// The prompt combines a general-assistant instruction with the shared JSON
// output template (OUT_PROMPT_TEMPLATE), pre-filling the chatType and data
// partial variables so the model replies in the expected envelope.
func NewDefaultHandler(svc *svc.ServiceContext) *Empty {
	const questionTemplate = "you are an all-round assistant, please help me answer this question: \n\n<< input >>\n{{.input}}"

	// chatType is rendered as the numeric id of the default handler so that
	// the JSON envelope carries a bare number, e.g. {"chatType": 0, ...}.
	tpl := prompts.PromptTemplate{
		Template:       questionTemplate + "\n\n" + OUT_PROMPT_TEMPLATE,
		InputVariables: []string{langchain.Input},
		TemplateFormat: prompts.TemplateFormatGoTemplate,
		PartialVariables: map[string]any{
			"chatType": fmt.Sprintf("%d", domain.DefaultHandler),
			"data":     "solution",
		},
	}

	chain := chains.NewLLMChain(svc.LLMs, tpl, chains.WithCallback(svc.Callbacks))
	return &Empty{c: chain}
}

// Name returns the registry key under which this fallback handler is selected.
func (e *Empty) Name() string {
	const handlerName = "default"
	return handlerName
}

// Description returns the natural-language hint the router uses to decide
// when to pick this handler. (Translation: "This is a default program —
// choose it when no better option fits; pass the full chat history when
// using it.")
func (e *Empty) Description() string {
	const desc = "这是一个默认的程序，在没有合适的选择时就选择它，在使用的时候请携带所有历史记录"
	return desc
}

// Call runs the default LLM chain against the raw user input and returns the
// model output prefixed with the SuccessWithData marker expected downstream.
//
// Fixes: removed a leftover debug fmt.Println that printed raw user input to
// stdout, and wrapped the prediction error with context (%w preserves
// errors.Is/errors.As for callers).
func (e *Empty) Call(ctx context.Context, input string) (string, error) {
	out, err := chains.Predict(ctx, e.c, map[string]any{
		langchain.Input: input,
	})
	if err != nil {
		return "", fmt.Errorf("default handler predict: %w", err)
	}

	// Trailing blank lines separate this answer from any subsequent output.
	return SuccessWithData + out + "\n\n\n", nil
}
