package com.artisan.controller;

import com.alibaba.cloud.ai.dashscope.chat.DashScopeChatModel;
import com.artisan.utils.MultiPlatformAndModelOptions;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.chat.prompt.ChatOptions;
import org.springframework.ai.deepseek.DeepSeekChatModel;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

import java.util.HashMap;
import java.util.Map;

@RestController
public class MultiPlatformAndModelController {

    /**
     * Registry of chat models keyed by platform name.
     * key: platform name ("dashscope", "ollama", "deepseek")
     * value: the chat model that serves requests for that platform
     */
    private final Map<String, ChatModel> platforms = new HashMap<>();

    /**
     * Creates the controller and registers one chat model per supported platform.
     *
     * @param dashScopeChatModel chat model instance serving DashScope platform requests
     * @param deepSeekChatModel  chat model instance serving DeepSeek platform requests
     * @param ollamaChatModel    chat model instance serving Ollama platform requests
     */
    public MultiPlatformAndModelController(
            DashScopeChatModel dashScopeChatModel,
            DeepSeekChatModel deepSeekChatModel,
            OllamaChatModel ollamaChatModel
    ) {
        // Register each injected model under its platform key.
        platforms.put("dashscope", dashScopeChatModel);
        platforms.put("ollama", ollamaChatModel);
        platforms.put("deepseek", deepSeekChatModel);
    }

    /**
     * Handles a chat request, talking to the AI model selected by the
     * platform/model/temperature carried in {@code options}, and returns the
     * reply as a stream of content chunks.
     *
     * Example requests:
     *   http://localhost:8080/chat2?platform=deepseek&model=deepseek-chat&temperature=0.7&message=%E4%BD%A0%E6%98%AF%E8%B0%81
     *   http://localhost:8080/chat2?platform=ollama&model=qwen3:0.6b&temperature=0.7&message=%E4%BD%A0%E6%98%AF%E8%B0%81
     *   http://localhost:8080/chat2?platform=dashscope&model=qwen-plus&temperature=0.7&message=%E4%BD%A0%E6%98%AF%E8%B0%81
     *
     * @param message the user's chat message
     * @param options platform, model and temperature configuration for this request
     * @return streaming response carrying the AI reply content
     * @throws IllegalArgumentException if the requested platform is not registered
     */
    // NOTE(review): "text/stream" is not a registered media type — confirm whether
    // "text/event-stream" (SSE) was intended; left unchanged to preserve behavior.
    @RequestMapping(value = "/chat2", produces = "text/stream;charset=UTF-8")
    public Flux<String> chat(
            String message,
            MultiPlatformAndModelOptions options) {

        // Look up the chat model registered for the requested platform.
        String platform = options.getPlatform();
        ChatModel chatModel = platforms.get(platform);

        // Fail fast with a clear client-facing error instead of an opaque NPE
        // from ChatClient.builder(null) when the platform is unknown.
        if (chatModel == null) {
            throw new IllegalArgumentException(
                    "Unsupported platform: " + platform
                            + "; supported platforms: " + platforms.keySet());
        }

        // Build a chat client configured with the per-request model and temperature.
        ChatClient chatClient = ChatClient.builder(chatModel)
                .defaultOptions(ChatOptions.builder()
                        .temperature(options.getTemperature())
                        .model(options.getModel())
                        .build())
                .build();

        // Send the user message and stream back the AI reply content.
        return chatClient.prompt().user(message).stream().content();
    }

}
