use super::openai_client::{ChatRequest, ChatResponse, OpenAIClient, ResponseFormat};
use super::tool_functions::function_keys::WRITE_FILE_KEY;
use super::tool_functions::tools_model::{ToolResult, TOOLS_JSON};
use super::tool_functions::{dispatch_function_call, FunctionCallContext};
use crate::models::{ChatMessage, ProjectConfig, Role};
use anyhow::{anyhow, Result};
use log::error;

/// Zero-sized namespace type for the chat/tool-call loop; all behavior
/// lives in associated functions on the `impl` below.
pub struct ChatLoop;

impl ChatLoop {
    pub async fn run(
        client: &OpenAIClient,
        initial_request: ChatRequest,
        context: &Option<ProjectConfig>,
    ) -> Result<ChatResponse> {
        let mut request = initial_request;
        let mut max_iterations = 10; // 防止无限循环的安全措施

        loop {
            if max_iterations == 0 {
                return Err(anyhow!("Maximum chat iterations reached"));
            }
            max_iterations -= 1;

            println!("---start-openai-request-iterations:{}", max_iterations);

            let response = client.send_request(&request).await?;

            // 重构后的函数，用于处理工具调用的结果
            let tool_results = Self::process_tool_calls(&response, context).await?;

            if tool_results.is_empty() {
                return Ok(response);
            }

            // 准备下一次请求的消息
            let mut new_messages = request.messages.clone();
            new_messages.push(ChatMessage {
                tool_call_id: None,
                role: Role::Assistant,
                content: serde_json::to_string(&response)?,
            });

            // 添加工具调用的结果到下一次请求的消息中
            for tool_result in &tool_results {
                new_messages.push(ChatMessage {
                    tool_call_id: Some(tool_result.tool_call_id.clone()),
                    role: Role::Tool,
                    content: tool_result.content.clone(),
                });
            }

            // 更新请求，确保工具调用的状态被传递
            request = ChatRequest {
                messages: new_messages,
                model: request.model.clone(),
                max_tokens: request.max_tokens,
                temperature: request.temperature,
                response_format: ResponseFormat {
                    r#type: "text".to_string(),
                },
                tools: Some(TOOLS_JSON.clone()),
                tool_calls: None, // 重置 tool_calls，因为下一次请求不需要指定 tool_calls
            };
        }
    }

    /// 处理工具调用的结果，过滤掉不需要的工具调用（如 WRITE_FILE_KEY）
    /// 并返回需要处理的工具调用结果。
    async fn process_tool_calls(
        response: &ChatResponse,
        context: &Option<ProjectConfig>,
    ) -> Result<Vec<ToolResult>> {
        let mut tool_results = Vec::<ToolResult>::new();

        for choice in &response.choices {
            if let Some(tool_calls) = choice.message.tool_calls.as_ref() {
                for tool_call in tool_calls {
                    let function_call = FunctionCallContext {
                        id: tool_call.id.to_string(),
                        name: tool_call.function.name.clone(),
                        arguments: tool_call.function.arguments.clone(),
                    };

                    match dispatch_function_call(&function_call, context).await {
                        Ok(result) => {
                            // 过滤掉不需要的工具调用（如 WRITE_FILE_KEY）
                            if result.name != WRITE_FILE_KEY {
                                tool_results.push(result);
                            }
                        }
                        Err(e) => {
                            error!("Function call failed: {}", e);
                            eprintln!("Function call failed: {}", e)
                        }
                    }
                }
            }
        }

        Ok(tool_results)
    }
}
