package com.icecream.springaiquickstart.controller;

import jakarta.servlet.http.HttpServletResponse;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.deepseek.DeepSeekChatModel;
import org.springframework.ai.deepseek.DeepSeekChatOptions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

@RestController
@RequestMapping("/ai")
public class ChatController {

    // Constructor injection instead of field @Autowired: the dependency is
    // final (immutable after construction) and the controller can be built
    // in tests without a Spring context. With a single constructor, Spring
    // wires it automatically — no annotation needed.
    private final DeepSeekChatModel chatModel;

    public ChatController(DeepSeekChatModel chatModel) {
        this.chatModel = chatModel;
    }

    /**
     * Blocking, single-shot chat completion.
     *
     * @param message user prompt; defaults to "给我讲个笑话" ("tell me a joke")
     * @return the model's complete text reply
     */
    @GetMapping("/generate")
    public String generate(@RequestParam(value = "message", defaultValue = "给我讲个笑话") String message) {
        System.out.println("收到消息：" + message);
        String result = chatModel.call(message);
        // Echo the model output to the console for debugging.
        System.out.println(result);
        return result;
    }

    /**
     * Streaming completion that exposes the raw {@link ChatResponse} chunks
     * (metadata included) rather than just the text.
     *
     * @param message user prompt; defaults to "给我讲个笑话"
     * @return a reactive stream of partial chat responses
     */
    @GetMapping("/generateStream1")
    public Flux<ChatResponse> generateStream1(@RequestParam(value = "message", defaultValue = "给我讲个笑话") String message) {
        System.out.println("收到消息：" + message);
        Prompt prompt = new Prompt(new UserMessage(message));
        return chatModel.stream(prompt);
    }

    /**
     * Streaming completion that emits only the text content of each chunk.
     *
     * @param message  user prompt; defaults to "给我讲个笑话"
     * @param response injected so the charset can be forced before streaming
     * @return a reactive stream of text fragments
     */
    @GetMapping("/generateStream2")
    public Flux<String> generateStream2(
            @RequestParam(value = "message", defaultValue = "给我讲个笑话") String message,
            HttpServletResponse response) {
        // Force UTF-8 so non-ASCII (e.g. Chinese) output is not garbled.
        response.setCharacterEncoding("UTF-8");
        System.out.println("收到消息：" + message);
        Prompt prompt = new Prompt(new UserMessage(message));
        return chatModel.stream(prompt)
                .map(chatResponse -> chatResponse.getResult().getOutput().getText());
    }

    /**
     * Demonstrates per-request runtime options: an optional {@code temp}
     * parameter overrides the model's default temperature for this call only.
     * <p>
     * Examples:
     * http://localhost:8080/ai/runtimeOptions?message=1加1等于几&amp;temp=0.1
     * http://localhost:8080/ai/runtimeOptions?message=1加1等于几&amp;temp=2
     *
     * @param message user prompt (required)
     * @param temp    optional sampling temperature; when absent the model's
     *                configured default is used
     * @return the model's text reply
     */
    @GetMapping("/runtimeOptions")
    public String runtimeOptions(@RequestParam(value = "message") String message,
                                 @RequestParam(value = "temp", required = false) Double temp) {
        System.out.println("收到消息：" + message);
        Prompt prompt;
        if (temp != null) {
            // Build DeepSeekChatOptions carrying the override; options passed
            // on the Prompt take precedence over the application defaults.
            DeepSeekChatOptions options = DeepSeekChatOptions.builder().temperature(temp).build();
            prompt = new Prompt(message, options);
            System.out.println("使用运行时覆盖 temperature=" + temp);
        } else {
            // No override supplied — fall back to the configured defaults.
            prompt = new Prompt(message);
            System.out.println("使用默认 temperature");
        }
        ChatResponse resp = chatModel.call(prompt);
        String result = resp.getResult().getOutput().getText();
        System.out.println("模型返回：" + result);
        return result;
    }
}
