package com.tfjybj.metaverseai.controller;

import org.springframework.ai.chat.messages.AssistantMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.model.Generation;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.ai.zhipuai.ZhiPuAiChatModel;
import org.springframework.ai.zhipuai.ZhiPuAiChatOptions;
import org.springframework.ai.zhipuai.api.ZhiPuAiApi;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.http.codec.ServerSentEvent;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

import java.util.Objects;

@RestController
@RequestMapping("/ai")
public class ChatController {

    /** OpenAI-compatible chat model (configured to route to the "kimi" model). */
    @Autowired
    private OpenAiChatModel chatClient;

    /** ZhiPu AI chat model used by the answer-summary endpoint. */
    @Autowired
    private ZhiPuAiChatModel zhipuChatModel;

    /**
     * Streams the model's reply to {@code message} as Server-Sent Events.
     *
     * @param message the user's chat message
     * @return a stream of SSE frames, one per generated content chunk, terminated
     *         by a final {@code "complete"} event so the client knows the reply ended
     */
    @CrossOrigin(origins = "http://localhost:63342/")
    @GetMapping(value = "/easyChat", params = "message", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public Flux<ServerSentEvent<String>> easyChat(@RequestParam String message) {
        Prompt prompt = new Prompt(message, OpenAiChatOptions.builder()
                .withModel("kimi") // which backing model to use
                .withTemperature(0.9f) // high temperature: more creative, less deterministic
                .build());
        // Stream only. The previous extra blocking chatClient.call(prompt) invoked
        // the model a second time per request just to print the result, doubling
        // latency and token cost.
        return chatClient.stream(prompt)
                .filter(Objects::nonNull)
                .filter(chatResponse -> chatResponse.getResults() != null)
                .flatMap(chatResponse -> Flux.fromIterable(chatResponse.getResults()))
                .filter(Objects::nonNull)
                .map(Generation::getOutput)
                .filter(Objects::nonNull)
                // drop chunks whose content is null (e.g. the final finish-reason frame)
                .filter(output -> Objects.nonNull(output.getContent()))
                .map(AssistantMessage::getContent)
                .map(content -> ServerSentEvent.builder(content).build())
                .concatWith(Flux.just(ServerSentEvent.builder("complete").build())); // completion signal
    }

    /**
     * Generates an article for students to read, derived from a question and a
     * word-frequency summary of the students' answers, using ZhiPu GLM-3-Turbo.
     *
     * @param textQuestion  the question that was posed in class
     * @param studentAnswer word-frequency statistics of the students' answers
     * @return the generated article text, or an empty string if the model
     *         returned no usable result
     */
    // NOTE(review): this endpoint returns a plain String yet declares
    // produces = text/event-stream; the declared type is kept for backward
    // compatibility, but TEXT_PLAIN looks like the intended media type — confirm
    // against the consuming client before changing.
    @GetMapping(value = "/easyAnswer", params = "studentAnswer", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public String easyAnswer(@RequestParam String textQuestion, @RequestParam String studentAnswer) {
        // System prompt (Chinese, intentionally unchanged): "You are an excellent
        // teacher; given the question and the word-frequency stats of student
        // answers, write an article for the students to read."
        String systemPrompt = "你是一个优秀的全科老师,\n" +
                "学生在上课的时候对问题进行了回答，我们根据学生的回答生成了词云，\n" +
                "我会告诉你问题和学生回答的词频统计，\n" +
                "请你根据问题和学生回答的词频统计，通过分析同学们的回答，生成一篇文章,来给同学们阅读。\n";

        String message = systemPrompt + "问题: '''" + textQuestion + "'''\n\n" +
                "学生回答的词频统计:''' " + studentAnswer + "'''\n";

        Prompt zhipuPrompt = new Prompt(message, ZhiPuAiChatOptions.builder()
                .withModel(ZhiPuAiApi.ChatModel.GLM_3_Turbo.getValue())
                .withTemperature(0.5f) // moderate temperature: balance accuracy and variety
                .build());
        ChatResponse response = zhipuChatModel.call(zhipuPrompt);
        // Guard the getResult()/getOutput() chain instead of risking an NPE on an
        // empty model response; also avoids calling getContent() twice.
        Generation result = response.getResult();
        if (result == null || result.getOutput() == null || result.getOutput().getContent() == null) {
            return "";
        }
        return result.getOutput().getContent();
    }
}

