package memoryx

import (
	"context"
	"github.com/tmc/langchaingo/callbacks"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/memory"
)

// outParser post-processes the raw AI output string before it is written to
// the chat history (see SaveContext). A nil parser means "store as-is".
type outParser func(ctx context.Context, input string) string

// SummaryBuffer is a conversation memory that keeps the raw chat history
// until it grows past MaxTokenLimit tokens, at which point SaveContext
// condenses the history into a single rolling summary message.
type SummaryBuffer struct {
	outParser                  // optional hook applied to AI outputs before storage
	*memory.ConversationBuffer // provides MemoryKey, prefixes, and ChatHistory
	chain    chains.Chain      // LLM chain that produces the rolling summary
	callback callbacks.Handler // callback handler forwarded to the chain

	// MaxTokenLimit is the token budget for the raw history; exceeding it
	// triggers summarization in SaveContext.
	MaxTokenLimit int
	// buffer holds the current summary message; nil until the first
	// summarization has happened.
	buffer        llms.ChatMessage
}

// NewSummaryBuffer builds a SummaryBuffer backed by the given model.
// maxTokenLimit caps the token count of the raw chat history; once the
// history exceeds it, SaveContext folds the history into a summary.
// Optional behavior (callback handler, output parser) is supplied via opts.
func NewSummaryBuffer(model llms.Model, maxTokenLimit int, opts ...Option) *SummaryBuffer {
	opt := newOption(opts...)

	// NOTE: the model parameter was previously named "llms", shadowing the
	// imported llms package; renamed to keep the package usable in the body.
	return &SummaryBuffer{
		ConversationBuffer: memory.NewConversationBuffer(),
		chain:              chains.NewLLMChain(model, createSummaryPrompt(), chains.WithCallback(opt.callback)),
		callback:           opt.callback,
		MaxTokenLimit:      maxTokenLimit,
		outParser:          opt.outParser,
		// buffer deliberately left at its zero value (nil): no summary yet.
	}
}

// GetMemoryKey reports the key under which this memory exposes its contents,
// delegating to the embedded ConversationBuffer.
func (s *SummaryBuffer) GetMemoryKey(ctx context.Context) string {
	key := s.ConversationBuffer.GetMemoryKey(ctx)
	return key
}

// MemoryVariables lists the input keys this memory loads dynamically,
// delegating to the embedded ConversationBuffer.
func (s *SummaryBuffer) MemoryVariables(ctx context.Context) []string {
	vars := s.ConversationBuffer.MemoryVariables(ctx)
	return vars
}

// LoadMemoryVariables returns the stored conversation keyed by MemoryKey.
// The rolling summary message (if one exists) is prepended to the raw chat
// history, and the combined sequence is flattened into a single buffer
// string using the configured human/AI prefixes. The inputs map is unused.
func (s *SummaryBuffer) LoadMemoryVariables(ctx context.Context, inputs map[string]any) (map[string]any, error) {
	messages, err := s.ChatHistory.Messages(ctx)
	if err != nil {
		return nil, err
	}

	// Pre-size for the history plus an optional leading summary message.
	res := make([]llms.ChatMessage, 0, len(messages)+1)
	if s.buffer != nil {
		// The summary stands in for the pruned part of the conversation.
		res = append(res, s.buffer)
	}
	res = append(res, messages...)

	bufferString, err := llms.GetBufferString(res, s.HumanPrefix, s.AIPrefix)
	if err != nil {
		return nil, err
	}

	return map[string]any{
		s.MemoryKey: bufferString,
	}, nil
}

// SaveContext appends the user input and (optionally parsed) AI output of
// this run to the chat history. If the raw history then exceeds
// MaxTokenLimit tokens, the summarization chain folds the history into the
// rolling summary, the result replaces s.buffer, and the raw history is
// cleared.
func (s *SummaryBuffer) SaveContext(ctx context.Context, inputs map[string]any, outputs map[string]any) error {
	// Save the new conversation turn.
	userInputValue, err := memory.GetInputValue(inputs, s.InputKey)
	if err != nil {
		return err
	}
	if err = s.ChatHistory.AddUserMessage(ctx, userInputValue); err != nil {
		return err
	}

	aiOutPutValue, err := memory.GetInputValue(outputs, s.OutputKey)
	if err != nil {
		return err
	}
	if s.outParser != nil {
		aiOutPutValue = s.outParser(ctx, aiOutPutValue)
	}
	if err = s.ChatHistory.AddAIMessage(ctx, aiOutPutValue); err != nil {
		return err
	}

	// Check whether the raw history still fits the token budget.
	messages, err := s.ChatHistory.Messages(ctx)
	if err != nil {
		return err
	}
	bufferString, err := llms.GetBufferString(messages, s.HumanPrefix, s.AIPrefix)
	if err != nil {
		return err
	}
	if llms.CountTokens("", bufferString) <= s.MaxTokenLimit {
		// Within the limit; keep the raw history untouched.
		return nil
	}

	// Over the limit: fold the raw history into the rolling summary.
	var currentSummary string
	if s.buffer != nil {
		currentSummary = s.buffer.GetContent()
	}
	// BUG FIX: the arguments were previously swapped — the new conversation
	// lines went into "summary" and the existing summary into "new_lines".
	// The progressive-summarization prompt expects the current summary in
	// {summary} and the messages being folded in via {new_lines}.
	// TODO(review): confirm against createSummaryPrompt's template variables.
	newSummary, err := chains.Predict(ctx, s.chain, map[string]any{
		"summary":   currentSummary,
		"new_lines": bufferString,
	})
	if err != nil {
		return err
	}

	s.buffer = &llms.SystemChatMessage{
		Content: newSummary,
	}

	// The summarized turns are no longer needed verbatim.
	return s.ChatHistory.SetMessages(ctx, nil)
}

// Clear wipes the memory: the rolling summary is discarded and the
// underlying conversation buffer is reset.
func (s *SummaryBuffer) Clear(ctx context.Context) error {
	s.buffer = nil
	err := s.ConversationBuffer.Clear(ctx)
	return err
}
