package com.ocean.filter.controller;

import io.github.lnyocly.ai4j.listener.SseListener;
import io.github.lnyocly.ai4j.platform.openai.chat.entity.ChatCompletion;
import io.github.lnyocly.ai4j.platform.openai.chat.entity.ChatCompletionResponse;
import io.github.lnyocly.ai4j.platform.openai.chat.entity.ChatMessage;
import io.github.lnyocly.ai4j.service.IChatService;
import io.github.lnyocly.ai4j.service.PlatformType;
import io.github.lnyocly.ai4j.service.factor.AiService;
import jakarta.annotation.Resource;
import jakarta.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.advisor.PromptChatMemoryAdvisor;
import org.springframework.ai.chat.memory.ChatMemory;
import org.springframework.ai.chat.memory.InMemoryChatMemory;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.ai.openai.api.OpenAiApi;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

import java.io.PrintWriter;

import static org.springframework.ai.chat.client.advisor.AbstractChatMemoryAdvisor.CHAT_MEMORY_CONVERSATION_ID_KEY;
import static org.springframework.ai.chat.client.advisor.AbstractChatMemoryAdvisor.CHAT_MEMORY_RETRIEVE_SIZE_KEY;

/**
 * REST endpoints for chatting with an OpenAI-compatible backend.
 *
 * <p>{@code /chat} and {@code /chatStream} go through the ai4j client; the
 * {@code /chatWithChatMemory} endpoint uses Spring AI's {@link ChatClient}
 * with a shared in-memory conversation store keyed by {@code chatId}.
 */
@RestController // @RestController already implies @Controller and @ResponseBody
public class OpenAiController {

    private static final Logger log = LoggerFactory.getLogger(OpenAiController.class);

    @Autowired
    private AiService aiService;

    @Resource
    private OpenAiChatModel openAiChatModel;

    // Shared conversation history for /chatWithChatMemory; lives as long as
    // this singleton controller, so it grows unbounded across restart-free runs.
    private final ChatMemory chatMemory = new InMemoryChatMemory();

    // NOTE(review): the base URL and API key were previously hard-coded here,
    // committing a live secret to source control. They are now injected from
    // configuration; rotate the leaked key and never put it back in code.
    @Value("${openai.base-url:https://api.chatanywhere.tech/}")
    private String openAiBaseUrl;

    @Value("${openai.api-key:}")
    private String openAiApiKey;

    /**
     * Blocking chat completion.
     *
     * @param question user prompt, sent as a single user message
     * @return the assistant reply text (first choice)
     * @throws Exception propagated from the underlying ai4j client
     */
    @GetMapping("/chat")
    public String getChatMessage(@RequestParam String question) throws Exception {
        // Obtain the OpenAI-flavoured chat service from the ai4j factory.
        IChatService chatService = aiService.getChatService(PlatformType.OPENAI);

        // Build the request payload.
        ChatCompletion chatCompletion = ChatCompletion.builder()
                .model("gpt-4o-mini")
                .message(ChatMessage.withUser(question))
                .build();
        log.debug("chat request: {}", chatCompletion);

        // Send the (blocking) chat request.
        ChatCompletionResponse chatCompletionResponse = chatService.chatCompletion(chatCompletion);

        // Extract the reply text and token usage.
        String content = chatCompletionResponse.getChoices().get(0).getMessage().getContent();
        long totalTokens = chatCompletionResponse.getUsage().getTotalTokens();
        log.info("chat content: {}", content);
        log.info("total tokens consumed: {}", totalTokens);

        return content;
    }

    /**
     * Streams the completion to the client, flushing each chunk as it arrives.
     *
     * @param question user prompt, sent as a single user message
     * @param response raw servlet response used to write chunks incrementally
     * @throws Exception propagated from the underlying ai4j client or the writer
     */
    @GetMapping("/chatStream")
    public void getChatMessageStream(@RequestParam String question, HttpServletResponse response) throws Exception {
        // UTF-8 so multi-byte (e.g. Chinese) characters are not garbled.
        response.setCharacterEncoding("UTF-8");

        // Obtain the OpenAI-flavoured chat service from the ai4j factory.
        IChatService chatService = aiService.getChatService(PlatformType.OPENAI);

        ChatCompletion chatCompletion = ChatCompletion.builder()
                .model("gpt-4o-mini")
                .message(ChatMessage.withUser(question))
                .build();

        PrintWriter writer = response.getWriter();
        // Forward every incremental token to the client as soon as it arrives.
        SseListener sseListener = new SseListener() {
            @Override
            protected void send() {
                writer.write(this.getCurrStr());
                writer.flush();
                log.debug("stream chunk: {}", this.getCurrStr());
            }
        };
        chatService.chatCompletionStream(chatCompletion, sseListener);
        // Flush but do NOT close: the servlet container owns the response
        // writer's lifecycle and closes it after the handler returns.
        writer.flush();
        log.info("stream output: {}", sseListener.getOutput());
    }

    /**
     * Streams a Spring AI reply while keeping per-conversation memory.
     *
     * @param chatId conversation key used to look up / store chat memory
     * @param prompt user prompt for this turn
     * @return a reactive stream of reply fragments
     * @throws IllegalStateException if no API key is configured
     */
    @GetMapping("/chatWithChatMemory")
    public Flux<String> chatWithChatMemory(@RequestParam String chatId, @RequestParam String prompt) {
        if (openAiApiKey == null || openAiApiKey.isBlank()) {
            // Fail fast with a clear message instead of a cryptic 401 downstream.
            throw new IllegalStateException("openai.api-key is not configured");
        }
        // NOTE(review): building the API client and model per request is
        // wasteful; promote them to singleton beans once config is stable.
        OpenAiApi openAiApi = new OpenAiApi(openAiBaseUrl, openAiApiKey);
        OpenAiChatOptions options = OpenAiChatOptions.builder()
                .withModel(OpenAiApi.ChatModel.GPT_4_O_MINI)
                .withTemperature(0.7F)
                .build();
        OpenAiChatModel requestModel = new OpenAiChatModel(openAiApi, options);

        ChatClient chatClient = ChatClient.builder(requestModel)
                .defaultAdvisors(new PromptChatMemoryAdvisor(chatMemory))
                .build();

        return chatClient.prompt()
                .user(prompt)
                .advisors(a -> a
                        .param(CHAT_MEMORY_CONVERSATION_ID_KEY, chatId)
                        .param(CHAT_MEMORY_RETRIEVE_SIZE_KEY, 100)
                )
                .stream()
                .content();
    }
}
