package com.boulderai.mcp.example;

import com.boulderai.mcp.model.llm.LlmMessage;
import com.boulderai.mcp.model.llm.LlmResponse;
import com.boulderai.mcp.service.LlmMcpService;
import com.boulderai.mcp.service.McpService;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Example of integrating a large language model (LLM) with MCP.
 *
 * <p>Demonstrates how an LLM conversation can automatically invoke MCP tools:
 * connect to an MCP server, initialize a session, run a tool-enabled chat,
 * then disconnect. The example only runs when the application is started with
 * the {@code --run-llm-example} command-line flag.</p>
 */
@Component
public class LlmMcpExample implements CommandLineRunner {
    
    private static final Logger logger = LoggerFactory.getLogger(LlmMcpExample.class);
    
    /** Command-line flag that opts in to running this example. */
    private static final String RUN_EXAMPLE_FLAG = "--run-llm-example";
    
    @Autowired
    private McpService mcpService;
    
    @Autowired
    private LlmMcpService llmMcpService;
    
    private final ObjectMapper objectMapper = new ObjectMapper();
    
    /**
     * Spring Boot entry point. Runs the example only when the
     * {@code --run-llm-example} flag is present among the arguments;
     * otherwise logs a hint and returns.
     *
     * @param args application command-line arguments
     */
    @Override
    public void run(String... args) throws Exception {
        // Check whether the opt-in flag was supplied on the command line.
        boolean runExample = false;
        for (String arg : args) {
            if (RUN_EXAMPLE_FLAG.equals(arg)) {
                runExample = true;
                break;
            }
        }
        
        if (!runExample) {
            logger.info("LLM-MCP example not requested. Use --run-llm-example to run the example.");
            return;
        }
        
        logger.info("Starting LLM-MCP integration example...");
        
        try {
            runLlmMcpExample();
        } catch (Exception e) {
            logger.error("Error running LLM-MCP example", e);
        }
    }
    
    /**
     * Runs the end-to-end example: connect to the MCP server, initialize the
     * session, chat with the LLM (which may invoke MCP tools automatically),
     * log the response, and finally disconnect.
     *
     * <p>Blocks until the whole asynchronous chain completes. The original
     * fire-and-forget version returned immediately, so the caller's try/catch
     * never saw async failures and — in a {@code CommandLineRunner} — the JVM
     * could shut down before the example finished.</p>
     */
    private void runLlmMcpExample() {
        String sessionId = "example-session-" + System.currentTimeMillis();
        
        // 1. Assumes a local MCP server is listening on this WebSocket endpoint.
        String mcpServerUri = "ws://localhost:3001";
        
        logger.info("Connecting to MCP server: {}", mcpServerUri);
        
        mcpService.connect(sessionId, mcpServerUri)
                .thenCompose(v -> {
                    logger.info("Connected to MCP server successfully");
                    
                    // 2. Initialize the MCP session with our client metadata.
                    return mcpService.initialize(sessionId, createClientInfo());
                })
                .thenCompose(initResult -> {
                    logger.info("MCP session initialized: {}", initResult);
                    
                    // 3. Build the conversation messages.
                    List<LlmMessage> messages = createExampleMessages();
                    
                    // 4. Configure the LLM client (API key, base URL).
                    Map<String, String> clientConfig = createLlmClientConfig();
                    
                    // 5. Chat with the LLM; MCP tools are invoked automatically.
                    return llmMcpService.chatWithMcp(
                            sessionId,
                            "openai",
                            "gpt-3.5-turbo",
                            messages,
                            clientConfig
                    ).toFuture();
                })
                .thenAccept(response -> {
                    logger.info("LLM response received:");
                    if (response.getChoices() != null && !response.getChoices().isEmpty()) {
                        LlmMessage assistantMessage = response.getChoices().get(0).getMessage();
                        logger.info("Assistant: {}", assistantMessage.getContent());
                        
                        if (assistantMessage.getToolCalls() != null) {
                            logger.info("Tool calls made: {}", assistantMessage.getToolCalls().size());
                        }
                    }
                    
                    if (response.getUsage() != null) {
                        logger.info("Token usage - Prompt: {}, Completion: {}, Total: {}",
                                response.getUsage().getPromptTokens(),
                                response.getUsage().getCompletionTokens(),
                                response.getUsage().getTotalTokens());
                    }
                })
                // Log chat errors here (rather than propagating) so the
                // disconnect below always runs, like a finally block.
                .exceptionally(throwable -> {
                    logger.error("Error in LLM-MCP example", throwable);
                    return null;
                })
                .thenCompose(v -> {
                    // 6. Cleanup: disconnect from the MCP server.
                    logger.info("Disconnecting from MCP server...");
                    return mcpService.disconnect(sessionId);
                })
                .thenRun(() -> logger.info("LLM-MCP example completed successfully"))
                // Wait for completion so async errors (e.g. a failed disconnect)
                // surface to run()'s catch and the app does not exit mid-example.
                .toCompletableFuture()
                .join();
    }
    
    /**
     * Builds the MCP client-info payload sent during session initialization:
     * client name, version, and advertised capabilities (tools + resources).
     *
     * @return a JSON object describing this client
     */
    private com.fasterxml.jackson.databind.JsonNode createClientInfo() {
        // Pure ObjectNode construction cannot fail, so the original defensive
        // try/catch (which silently substituted an empty node) was removed.
        return objectMapper.createObjectNode()
                .put("name", "BoulderAI MCP Client")
                .put("version", "1.0.0")
                .set("capabilities", objectMapper.createObjectNode()
                        .put("tools", true)
                        .put("resources", true));
    }
    
    /**
     * Builds the example conversation: a system prompt enabling tool use plus
     * a user request that should exercise the available MCP tools.
     *
     * @return ordered list of system and user messages
     */
    private List<LlmMessage> createExampleMessages() {
        List<LlmMessage> messages = new ArrayList<>();
        
        // System message: instruct the model that tools are available.
        messages.add(LlmMessage.systemMessage(
                "You are a helpful assistant that can use various tools to help users. " +
                "When you need to perform actions or get information, use the available tools."
        ));
        
        // User message — tailor this to the MCP tools actually exposed.
        messages.add(LlmMessage.userMessage(
                "Please help me list the available files in the current directory and " +
                "then read the content of any README file if it exists."
        ));
        
        return messages;
    }
    
    /**
     * Builds the LLM client configuration from environment variables.
     * Keys are only added when the corresponding variable is set and non-blank,
     * so the service can fall back to its own defaults.
     *
     * @return map possibly containing {@code apiKey} and {@code baseUrl}
     */
    private Map<String, String> createLlmClientConfig() {
        Map<String, String> config = new HashMap<>();
        
        // API key from the environment; never hard-code credentials.
        String apiKey = System.getenv("OPENAI_API_KEY");
        if (apiKey != null && !apiKey.trim().isEmpty()) {
            config.put("apiKey", apiKey);
        }
        
        // Optional custom base URL (e.g. for a proxy or compatible endpoint).
        String baseUrl = System.getenv("OPENAI_BASE_URL");
        if (baseUrl != null && !baseUrl.trim().isEmpty()) {
            config.put("baseUrl", baseUrl);
        }
        
        return config;
    }
    
    /**
     * Demonstrates a streaming chat: prints content chunks to stdout as they
     * arrive and logs completion or errors. Fire-and-forget; the caller is
     * responsible for keeping the application alive while streaming.
     *
     * @param sessionId MCP session identifier to chat under
     */
    public void runStreamingExample(String sessionId) {
        List<LlmMessage> messages = createExampleMessages();
        Map<String, String> clientConfig = createLlmClientConfig();
        
        logger.info("Starting streaming chat example...");
        
        llmMcpService.chatStreamWithMcp(
                sessionId,
                "openai",
                "gpt-3.5-turbo",
                messages,
                clientConfig
        )
        .doOnNext(response -> {
            if (response.getChoices() != null && !response.getChoices().isEmpty()) {
                LlmMessage message = response.getChoices().get(0).getMessage();
                if (message.getContent() != null) {
                    // Print chunks inline to emulate a live typing effect.
                    System.out.print(message.getContent());
                }
            }
        })
        .doOnComplete(() -> {
            System.out.println();
            logger.info("Streaming chat completed");
        })
        .doOnError(error -> {
            logger.error("Error in streaming chat", error);
        })
        .subscribe();
    }
}