package agent

import (
	"fmt"

	"github.com/google/uuid"
	"techoiceness.com/aiagent/llm-gateway/internal/llm/types"
	"techoiceness.com/aiagent/llm-gateway/internal/server/sse"

	"github.com/rs/zerolog/log"
)

// LLMResponseToChatResponse converts a complete (non-streaming) LLM response
// into an SSE ChatResponse for the given conversation.
//
// Mapping rules, in order of precedence:
//   - nil response / no choices → StatusError response (returned as a value,
//     not as a Go error, so the caller can still push it over SSE);
//   - a message carrying tool calls → StatusToolCalling with a status message
//     naming the first requested tool;
//   - otherwise → StatusCompleted with the message content and, when present,
//     the reasoning content.
//
// The returned error is always nil; failures are encoded in the response itself.
func LLMResponseToChatResponse(resp *types.Response, conversationID uuid.UUID) (*sse.ChatResponse, error) {
	switch {
	case resp == nil:
		log.Error().
			Str("process", "LLMResponseToChatResponse").
			Str("conversation_id", conversationID.String()).
			Msg("LLM响应为空")
		return sse.NewChatResponse(conversationID, sse.StatusError).
			WithError("INVALID_RESPONSE", "响应为空", "LLM 返回了空响应", true), nil

	case len(resp.Choices) == 0:
		log.Error().
			Str("process", "LLMResponseToChatResponse").
			Str("conversation_id", conversationID.String()).
			Msg("LLM响应没有选择项")
		return sse.NewChatResponse(conversationID, sse.StatusError).
			WithError("NO_CHOICES", "没有可用的响应选项", "LLM 返回的响应中没有选项", true), nil
	}

	msg := resp.Choices[0].Message

	// Tool calls take precedence over plain content; only the first one is reported.
	if calls := msg.ToolCalls; len(calls) > 0 {
		return sse.NewChatResponse(conversationID, sse.StatusToolCalling).
			WithStatusMessage(fmt.Sprintf("正在调用工具: %s", calls[0].Function.Name)), nil
	}

	// Plain completion: attach content, plus reasoning content when the model provided it.
	out := sse.NewChatResponse(conversationID, sse.StatusCompleted).
		WithContent(msg.Content)
	if rc := msg.ReasoningContent; rc != "" {
		out.ReasoningContent = rc
	}
	return out, nil
}

// LLMStreamResponseToChatResponse converts a single streaming LLM chunk into
// an SSE ChatResponse for the given conversation.
//
// Mapping rules, in order of precedence:
//   - nil chunk → StatusError (INVALID_STREAM_RESPONSE);
//   - chunk carrying an upstream error → StatusError (STREAM_ERROR) with the
//     error detail;
//   - chunk with no choices → StatusError (NO_STREAM_CHOICES);
//   - delta carrying tool calls → StatusToolCalling naming the first tool;
//   - otherwise → StatusStreaming with whatever delta content / reasoning
//     content is present (both may be empty on keep-alive chunks).
//
// The returned error is always nil; failures are encoded in the response itself.
func LLMStreamResponseToChatResponse(streamResp *types.StreamResponse, conversationID uuid.UUID) (*sse.ChatResponse, error) {
	if streamResp == nil {
		log.Error().
			Str("process", "LLMStreamResponseToChatResponse").
			Str("conversation_id", conversationID.String()).
			Msg("LLM流响应为空")
		return sse.NewChatResponse(conversationID, sse.StatusError).
			WithError("INVALID_STREAM_RESPONSE", "流式响应为空", "LLM 返回了空的流式响应", true), nil
	}

	// Check the embedded error BEFORE the choices check: error chunks commonly
	// carry zero choices, and the previous ordering masked the real upstream
	// error as a generic NO_STREAM_CHOICES.
	if streamResp.Error != nil {
		log.Error().
			Str("process", "LLMStreamResponseToChatResponse").
			Str("conversation_id", conversationID.String()).
			Err(streamResp.Error).
			Msg("LLM stream response error")
		return sse.NewChatResponse(conversationID, sse.StatusError).
			WithError("STREAM_ERROR", "流式处理错误", streamResp.Error.Error(), true), nil
	}

	if len(streamResp.Choices) == 0 {
		log.Error().
			Str("process", "LLMStreamResponseToChatResponse").
			Str("conversation_id", conversationID.String()).
			Msg("LLM流响应没有选择项")
		return sse.NewChatResponse(conversationID, sse.StatusError).
			WithError("NO_STREAM_CHOICES", "流式响应中没有可用选项", "LLM 返回的流式响应中没有选项", true), nil
	}

	delta := streamResp.Choices[0].Delta

	// Tool calls take precedence over incremental content; only the first one is reported.
	if len(delta.ToolCalls) > 0 {
		return sse.NewChatResponse(conversationID, sse.StatusToolCalling).
			WithStatusMessage(fmt.Sprintf("正在调用工具: %s", delta.ToolCalls[0].Function.Name)), nil
	}

	// Incremental content: either field may be absent on a given chunk.
	chatResp := sse.NewChatResponse(conversationID, sse.StatusStreaming)
	if delta.Content != "" {
		chatResp.WithDeltaContent(delta.Content)
	}
	if delta.ReasoningContent != "" {
		chatResp.WithDeltaReasoningContent(delta.ReasoningContent)
	}
	return chatResp, nil
}
