package com.atg.spring_ai.controller;


import com.alibaba.cloud.ai.dashscope.api.DashScopeApi;
import com.alibaba.cloud.ai.dashscope.chat.DashScopeChatModel;
import com.alibaba.cloud.ai.dashscope.chat.DashScopeChatOptions;
import com.alibaba.cloud.nacos.utils.StringUtils;
import com.atg.spring_ai.common.BaseResponse;
import com.atg.spring_ai.common.ResultUtils;
import com.atg.spring_ai.exception.BusinessException;
import com.atg.spring_ai.exception.ErrorCode;
import com.atg.spring_ai.model.dto.ChatRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.ChatOptions;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;

import java.util.concurrent.CompletableFuture;

/**
 * REST endpoints for chatting with the Alibaba DashScope model via Spring AI.
 *
 * @author atg
 * @since 2025/6/8 11:10
 */
@RestController
@RequestMapping("/dashscope/chat")
public class DashScopeChatController {

    /**
     * System persona prepended to every user question (Chinese: an experienced
     * relationship counselor with a psychology background whose goal is to help
     * people understand and resolve emotional difficulties).
     */
    private static final String PROMOTE_CHAT = "你是一位经验丰富的情感专家，擅长处理复杂的人际关系和情感问题。你拥有心理学背景，并且经过多年的实践，积累了丰富的经验。你的目标是帮助人们理解和解决他们的情感困扰，提升他们的生活质量";

    // ChatModel takes a Prompt (or partial conversation) as input, forwards it to
    // the backing large language model, and returns the generated response, which
    // the application can render to the user or process further.
    private final ChatModel dashScopeChat;

    public DashScopeChatController(ChatModel dashScopeChat) {
        this.dashScopeChat = dashScopeChat;
    }

    /**
     * Validates the request content and prepends the system persona.
     *
     * @param chatRequest incoming request carrying the user's question
     * @return the persona followed by the user's question
     * @throws BusinessException with {@link ErrorCode#PARAMS_ERROR} when the content is blank
     */
    private static String buildFullPrompt(ChatRequest chatRequest) {
        String content = chatRequest.getContent();
        if (StringUtils.isBlank(content)) {
            throw new BusinessException(ErrorCode.PARAMS_ERROR);
        }
        return PROMOTE_CHAT + "\n\n用户的问题：" + content;
    }

    /**
     * Per-call model options shared by the options-aware endpoints:
     * qwen-plus, temperature 0.7, at most 150 output tokens.
     */
    private static ChatOptions defaultOptions() {
        return DashScopeChatOptions.builder()
                .withModel(DashScopeApi.ChatModel.QWEN_PLUS.getModel())
                .withTemperature(0.7)
                .withMaxToken(150)
                .build();
    }

    /** Extracts the text of the first generation from a model response. */
    private static String responseText(ChatResponse chatResponse) {
        return chatResponse.getResult().getOutput().getText();
    }

    /**
     * Simple one-shot call that sends only the system persona as the prompt
     * (no user input) and returns the model's reply.
     *
     * NOTE(review): method name should be lowerCamelCase (simpleChat); kept
     * as-is to avoid changing the public class surface.
     *
     * @return the model's reply text wrapped in a success response
     */
    @GetMapping("/SimpleChat")
    public BaseResponse<String> SimpleChat() {
        // Invoke the model with the persona alone.
        ChatResponse chatResponse = dashScopeChat.call(new Prompt(PROMOTE_CHAT));
        return ResultUtils.success(responseText(chatResponse));
    }

    /**
     * Role-play chat: combines the system persona with the user's question and
     * returns the reply asynchronously.
     *
     * NOTE(review): supplyAsync without an explicit executor runs this blocking
     * model call on ForkJoinPool.commonPool(); consider a dedicated executor.
     *
     * @param chatRequest request body carrying the user's question
     * @return future completing with the model's reply text
     */
    @PostMapping("/chatWithRole")
    public CompletableFuture<BaseResponse<String>> ChatWithRole(@RequestBody ChatRequest chatRequest) {
        return CompletableFuture.supplyAsync(() -> {
            String fullPrompt = buildFullPrompt(chatRequest);
            ChatResponse chatResponse = dashScopeChat.call(new Prompt(fullPrompt));
            return ResultUtils.success(responseText(chatResponse));
        });
    }

    /**
     * Chat with per-call {@link ChatOptions} overriding model, temperature and
     * max tokens for this single request.
     *
     * @param chatRequest request body carrying the user's question
     * @return the model's reply text wrapped in a success response
     */
    @PostMapping("/chatWithOptions")
    public BaseResponse<String> ChatWithOptions(@RequestBody ChatRequest chatRequest) {
        Prompt prompt = new Prompt(buildFullPrompt(chatRequest), defaultOptions());
        // Invoke the model with the per-call options applied.
        ChatResponse chatResponse = dashScopeChat.call(prompt);
        return ResultUtils.success(responseText(chatResponse));
    }

    /**
     * Streaming chat with per-call options: emits the reply incrementally as
     * server-sent events.
     *
     * NOTE(review): the path says "String" but the method streams — the URL is
     * kept unchanged for client compatibility.
     *
     * @param chatRequest request body carrying the user's question
     * @param response    servlet response, configured for SSE output
     * @return a stream of reply-text fragments
     */
    @PostMapping("/chatAndStringWithOptions")
    public Flux<String> ChatAndStreamWithOptions(@RequestBody ChatRequest chatRequest, HttpServletResponse response) {
        response.setContentType("text/event-stream");
        response.setCharacterEncoding("UTF-8");

        Prompt prompt = new Prompt(buildFullPrompt(chatRequest), defaultOptions());

        // Stream partial generations; drop chunks whose text is null.
        return dashScopeChat.stream(prompt)
                .mapNotNull(res -> res.getResult().getOutput().getText());
    }

}
