package services

import (
	"context"
	"demo2/server/agent"
)

// LLMServiceWrapper adapts *LLMService so it satisfies
// agent.LLMServiceInterface, bridging the agent layer to this package.
type LLMServiceWrapper struct {
	llmService *LLMService // underlying service that performs the actual LLM calls
}

// NewLLMServiceWrapper returns a wrapper around llmService that
// implements agent.LLMServiceInterface.
func NewLLMServiceWrapper(llmService *LLMService) *LLMServiceWrapper {
	return &LLMServiceWrapper{llmService: llmService}
}

// GenerateResponse 生成回复
func (w *LLMServiceWrapper) GenerateResponse(ctx context.Context, messages []agent.DeepSeekMessage) (string, error) {
	// 转换消息格式
	serviceMessages := make([]DeepSeekMessage, len(messages))
	for i, msg := range messages {
		serviceMessages[i] = DeepSeekMessage{
			Role:    msg.Role,
			Content: msg.Content,
		}
	}

	return w.llmService.GenerateResponse(ctx, serviceMessages)
}

// GenerateStreamResponse 生成流式回复
func (w *LLMServiceWrapper) GenerateStreamResponse(ctx context.Context, messages []agent.DeepSeekMessage, responseChan chan<- string) error {
	// 转换消息格式
	serviceMessages := make([]DeepSeekMessage, len(messages))
	for i, msg := range messages {
		serviceMessages[i] = DeepSeekMessage{
			Role:    msg.Role,
			Content: msg.Content,
		}
	}

	return w.llmService.GenerateStreamResponse(ctx, serviceMessages, responseChan)
}
