package memoryx

import (
	"context"
	"fmt"
	"github.com/tmc/langchaingo/callbacks"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/memory"
	"github.com/tmc/langchaingo/prompts"
)

// SummaryBuffer is a conversation memory that keeps the raw recent chat
// history and, once that history grows past MaxTokenLimit tokens, condenses
// it into a single summary message produced by an LLM chain (see SaveContext).
type SummaryBuffer struct {
	*memory.ConversationBuffer
	chains.Chain

	// MaxTokenLimit is the token budget for the raw chat history; exceeding
	// it triggers summarization in SaveContext.
	MaxTokenLimit    int
	callbacksHandler callbacks.Handler
	// buffer holds the running summary as a single chat message, or nil
	// before the first summarization (and after Clear).
	buffer           llms.ChatMessage
	// messageTypeAi / messageTypeHuman are the role labels used when
	// formatting messages for the summarization prompt.
	messageTypeAi    string
	messageTypeHuman string
}

// NewSummaryBuffer builds a SummaryBuffer backed by model. maxTokenLimit is
// the token budget for the raw history before it gets condensed into a
// summary. Prompt, role labels, callbacks and the underlying conversation
// buffer are configurable via opts.
//
// Note: the parameter was renamed from `llms` to `model` — shadowing the
// imported llms package compiled only because the package was not referenced
// inside this function body.
func NewSummaryBuffer(model llms.Model, maxTokenLimit int, opts ...Option) *SummaryBuffer {
	opt := applyChatSummaryOptions(opts...)

	return &SummaryBuffer{
		// The summarization chain renders opt.prompt with the current
		// summary and the new conversation lines.
		Chain: chains.NewLLMChain(model, prompts.NewPromptTemplate(opt.prompt, []string{
			"summary", "new_lines",
		}), chains.WithCallback(opt.callbacksHandler)),
		MaxTokenLimit:      maxTokenLimit,
		ConversationBuffer: opt.ConversationBuffer,
		callbacksHandler:   opt.callbacksHandler,
		buffer:             nil, // no summary produced yet
		messageTypeAi:      opt.messageTypeAi,
		messageTypeHuman:   opt.messageTypeHuman,
	}
}

// GetMemoryKey reports the key under which this memory exposes its values,
// delegating to the embedded ConversationBuffer.
func (s *SummaryBuffer) GetMemoryKey(ctx context.Context) string {
	key := s.ConversationBuffer.GetMemoryKey(ctx)
	return key
}

// MemoryVariables lists the variable names this memory injects into chain
// inputs; the answer comes straight from the embedded ConversationBuffer.
func (s *SummaryBuffer) MemoryVariables(ctx context.Context) []string {
	vars := s.ConversationBuffer.MemoryVariables(ctx)
	return vars
}

// LoadMemoryVariables returns the memory content keyed by MemoryKey: the
// running summary message (if one exists) followed by the buffered chat
// history. When ReturnMessages is set, the raw []llms.ChatMessage slice is
// returned; otherwise the messages are flattened into a single string with
// HumanPrefix/AIPrefix role labels. The inputs map is unused.
func (s *SummaryBuffer) LoadMemoryVariables(ctx context.Context, inputs map[string]any) (map[string]any, error) {
	messages, err := s.ChatHistory.Messages(ctx)
	if err != nil {
		return nil, err
	}

	// Prepend the summary so the model sees condensed older context before
	// the recent raw messages. Pre-size: final length is known.
	res := make([]llms.ChatMessage, 0, len(messages)+1)
	if s.buffer != nil {
		res = append(res, s.buffer)
	}
	res = append(res, messages...)

	if s.ReturnMessages {
		return map[string]any{
			s.MemoryKey: res,
		}, nil
	}

	bufferString, err := llms.GetBufferString(res, s.HumanPrefix, s.AIPrefix)
	if err != nil {
		return nil, err
	}

	return map[string]any{
		s.MemoryKey: bufferString,
	}, nil
}

// SaveContext appends the latest user input and AI output to the chat
// history. If the resulting history exceeds MaxTokenLimit tokens, the LLM
// chain folds the buffered messages into the existing summary, the new
// summary replaces s.buffer, and the raw history is cleared.
func (s *SummaryBuffer) SaveContext(ctx context.Context, inputs map[string]any, outputs map[string]any) error {
	// Persist the new user/AI exchange.
	userInputValue, err := memory.GetInputValue(inputs, s.InputKey)
	if err != nil {
		return err
	}
	if err = s.ChatHistory.AddUserMessage(ctx, userInputValue); err != nil {
		return err
	}
	aiOutputValue, err := memory.GetInputValue(outputs, s.OutputKey)
	if err != nil {
		return err
	}
	if err = s.ChatHistory.AddAIMessage(ctx, aiOutputValue); err != nil {
		return err
	}

	// Check whether the raw history still fits the token budget.
	messages, err := s.ChatHistory.Messages(ctx)
	if err != nil {
		return err
	}

	bufferString, err := llms.GetBufferString(
		messages,
		s.ConversationBuffer.HumanPrefix,
		s.ConversationBuffer.AIPrefix,
	)
	if err != nil {
		return err
	}

	if llms.CountTokens("", bufferString) <= s.MaxTokenLimit {
		// Under the limit: keep the raw history untouched.
		return nil
	}

	// Format the buffered messages as the new conversation lines for the
	// summarization prompt.
	var newLines string
	for _, message := range messages {
		switch message.GetType() {
		case llms.ChatMessageTypeAI:
			newLines += fmt.Sprintf("%s: %s\n", s.messageTypeAi, message.GetContent())
		case llms.ChatMessageTypeHuman:
			newLines += fmt.Sprintf("%s: %s\n", s.messageTypeHuman, message.GetContent())
		}
	}

	// The summary produced so far (empty before the first summarization).
	var existingSummary string
	if s.buffer != nil {
		existingSummary = s.buffer.GetContent()
	}

	// BUGFIX: the two prompt variables were previously swapped — the
	// formatted messages were passed as "summary" and the prior summary as
	// "new_lines". The progressive-summary prompt contract is
	// "summary" = summary so far, "new_lines" = fresh lines to fold in.
	newSummary, err := chains.Predict(ctx, s.Chain, map[string]any{
		"summary":   existingSummary,
		"new_lines": newLines,
	}, chains.WithCallback(s.callbacksHandler))
	if err != nil {
		return err
	}

	// Replace the raw history with a single system message carrying the
	// updated summary.
	s.buffer = &llms.SystemChatMessage{Content: newSummary}

	return s.ChatHistory.SetMessages(ctx, nil)
}

// Clear drops the accumulated summary and then wipes the underlying
// conversation buffer, returning any error from the latter.
func (s *SummaryBuffer) Clear(ctx context.Context) error {
	// Reset the summary first; it is in-memory and cannot fail.
	s.buffer = nil
	if err := s.ConversationBuffer.Clear(ctx); err != nil {
		return err
	}
	return nil
}

// getNumTokensFromMessages flattens the buffered chat history into a single
// prefixed string and returns that string's token count.
func (s *SummaryBuffer) getNumTokensFromMessages(ctx context.Context) (int, error) {
	history, err := s.ChatHistory.Messages(ctx)
	if err != nil {
		return 0, err
	}

	flattened, err := llms.GetBufferString(history, s.ConversationBuffer.HumanPrefix, s.ConversationBuffer.AIPrefix)
	if err != nil {
		return 0, err
	}

	return llms.CountTokens("", flattened), nil
}
