package memoryx

import (
	"context"
	"fmt"
	"github.com/tmc/langchaingo/callbacks"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/memory"
)

// Summary is a conversation memory that maintains a running,
// LLM-generated summary of the dialogue rather than the raw message
// history. It embeds a memory.ConversationBuffer for storage and the
// Memory interface plumbing, plus a chains.Chain used to produce
// updated summaries.
type Summary struct {
	*memory.ConversationBuffer
	chains.Chain
	// callbacksHandler is forwarded to the summarization chain on each
	// Predict call in SaveContext.
	callbacksHandler callbacks.Handler
}

// NewSummary creates a Summary memory backed by the given model.
// Optional Options configure the underlying ConversationBuffer and the
// callbacks handler attached to the summarization chain.
//
// Note: the parameter is named model (not llms) so it does not shadow
// the imported llms package.
func NewSummary(model llms.Model, opts ...Option) *Summary {
	opt := applyChatSummaryOptions(opts...)

	return &Summary{
		Chain:              chains.NewLLMChain(model, createSummaryPrompt(), chains.WithCallback(opt.callbacksHandler)),
		ConversationBuffer: opt.ConversationBuffer,
		callbacksHandler:   opt.callbacksHandler,
	}
}

// GetMemoryKey returns the key under which the summary is exposed,
// delegating to the embedded ConversationBuffer.
func (s *Summary) GetMemoryKey(ctx context.Context) string {
	return s.ConversationBuffer.GetMemoryKey(ctx)
}

// MemoryVariables returns the variable names this memory provides,
// delegating to the embedded ConversationBuffer.
func (s *Summary) MemoryVariables(ctx context.Context) []string {
	return s.ConversationBuffer.MemoryVariables(ctx)
}

// LoadMemoryVariables returns the stored memory values (the current
// summary, keyed by the memory key), delegating to the embedded
// ConversationBuffer.
func (s *Summary) LoadMemoryVariables(ctx context.Context, inputs map[string]any) (map[string]any, error) {
	return s.ConversationBuffer.LoadMemoryVariables(ctx, inputs)
}

// SaveContext folds the latest user/AI exchange into the running
// summary and stores that summary as the sole message in the chat
// history.
//
// It loads the current summary, formats the new exchange, asks the
// summarization chain to merge them, and then replaces the stored
// history with the fresh summary.
func (s *Summary) SaveContext(ctx context.Context, inputs map[string]any, outputs map[string]any) error {
	// The current summary is stored under the memory key.
	message, err := s.LoadMemoryVariables(ctx, inputs)
	if err != nil {
		return err
	}
	summary := message[s.MemoryKey]

	// New lines of conversation to fold into the summary.
	userInputValue, err := memory.GetInputValue(inputs, s.InputKey)
	if err != nil {
		return err
	}
	aiOutputValue, err := memory.GetInputValue(outputs, s.OutputKey)
	if err != nil {
		return err
	}

	// "Human:"/"AI:" are the conventional role labels the summary
	// prompt expects (was misspelled "Homan"/"Ai").
	newLines := fmt.Sprintf("Human: %s\nAI: %s", userInputValue, aiOutputValue)

	// Ask the chain to produce the merged summary.
	newSummary, err := chains.Predict(ctx, s.Chain, map[string]any{
		"summary":   summary,
		"new_lines": newLines,
	}, chains.WithCallback(s.callbacksHandler))
	if err != nil {
		return err
	}

	// Replace (not append): the buffer must hold exactly one system
	// message — the latest summary, which already incorporates all
	// prior context. Appending would make LoadMemoryVariables return
	// every historical summary concatenated and grow without bound.
	return s.ChatHistory.SetMessages(ctx, []llms.ChatMessage{
		llms.SystemChatMessage{Content: newSummary},
	})
}

// Clear wipes the stored summary/history, delegating to the embedded
// ConversationBuffer.
func (s *Summary) Clear(ctx context.Context) error {
	return s.ConversationBuffer.Clear(ctx)
}
