package com.zhilei.deepseekdoctor.service.impl;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.zhilei.deepseekdoctor.enums.ChatTypeEnum;
import com.zhilei.deepseekdoctor.enums.SSEMsgType;
import com.zhilei.deepseekdoctor.mapper.ChatRecordMapper;
import com.zhilei.deepseekdoctor.pojo.ChatEntity;
import com.zhilei.deepseekdoctor.pojo.ChatRecord;
import com.zhilei.deepseekdoctor.service.OllamaService;
import com.zhilei.deepseekdoctor.utils.SSEServer;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.messages.AssistantMessage;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;

import java.time.LocalDateTime;
import java.util.List;
import java.util.stream.Collectors;

@Slf4j
@Service
public class OllamaServiceImpl implements OllamaService {

    @Resource
    private OllamaChatClient ollamaChatClient;

    @Resource
    private ChatRecordMapper chatRecordMapper;

    /**
     * Simple synchronous (blocking) chat call.
     * <p>
     * {@code ChatClient#call(String)} wraps the message in
     * {@code new Prompt(new UserMessage(message))} internally and blocks until the
     * model has produced its full reply.
     *
     * @param msg the user's message text
     * @return the model's reply content
     */
    @Override
    public String aiOllamaChat(String msg) {
        return ollamaChatClient.call(msg);
    }

    /**
     * Synchronous chat call with a caller-built {@link Prompt}, allowing the caller
     * to supply more than a plain user message (system messages, options, etc.).
     *
     * @param prompt the prompt object to send to the model
     * @return the assistant-message content of the first generation
     */
    @Override
    public String aiOllamaPromptChat(Prompt prompt) {
        ChatResponse response = ollamaChatClient.call(prompt);
        AssistantMessage output = response.getResult().getOutput();
        return output.getContent();
    }

    /**
     * Streaming chat: returns the raw reactive stream of partial responses so the
     * caller can subscribe and consume tokens as they arrive.
     *
     * @param msg the user's message text
     * @return a {@link Flux} emitting one {@link ChatResponse} per streamed chunk
     */
    @Override
    public Flux<ChatResponse> aiOllamaChatStream(String msg) {
        Prompt prompt = new Prompt(new UserMessage(msg));
        return ollamaChatClient.stream(prompt);
    }

    /**
     * Streaming chat collected eagerly: blocks until the model stream completes and
     * returns the content of every chunk in arrival order.
     *
     * @param msg the user's message text
     * @return the streamed content fragments, in order
     */
    @Override
    public List<String> aiOllamaChatstreamCount(String msg) {
        Prompt prompt = new Prompt(new UserMessage(msg));
        // toStream() blocks the calling thread until the Flux completes.
        return ollamaChatClient.stream(prompt)
                .toStream()
                .map(r -> r.getResult().getOutput().getContent())
                .collect(Collectors.toList());
    }

    /**
     * Streaming chat pushed over SSE. Persists the user's message, streams the model
     * reply chunk-by-chunk to the user's SSE connection, persists the complete bot
     * reply, and always signals completion to the client.
     *
     * @param chat carries the user's message and the SSE connection key (user name)
     */
    @Override
    public void aiV3OllamaChatstreamCount(ChatEntity chat) {
        String message = chat.getMessage();
        String userName = chat.getCurrentUserName();

        // Persist the user's side of the conversation before streaming the reply.
        saveChatRecord(userName, message, ChatTypeEnum.USER);

        Prompt prompt = new Prompt(new UserMessage(message));
        try {
            // Push each chunk to the client's SSE connection as it arrives, and
            // join all chunks into the full reply for persistence. toStream()
            // blocks until the Flux completes.
            String fullReply = ollamaChatClient.stream(prompt)
                    .toStream()
                    .map(r -> {
                        String content = r.getResult().getOutput().getContent();
                        SSEServer.sendMessage(userName, content, SSEMsgType.ADD);
                        return content;
                    })
                    .collect(Collectors.joining());

            saveChatRecord(userName, fullReply, ChatTypeEnum.BOT);
        } finally {
            // Always close out the SSE exchange, even if streaming failed,
            // so the client is never left waiting on a dangling connection.
            SSEServer.sendMessage(userName, "over", SSEMsgType.FINISH);
        }
    }

    /**
     * Loads the chat history for the given family member.
     *
     * @param who the family-member name to filter by
     * @return all chat records whose {@code family_member} column equals {@code who}
     */
    @Override
    public List<ChatRecord> getRecords(String who) {
        return chatRecordMapper.selectList(
                new QueryWrapper<ChatRecord>().eq("family_member", who));
    }

    /**
     * Persists a single chat record stamped with the current time.
     *
     * @param userName the family-member name the record belongs to
     * @param message  the message content
     * @param chatType who authored the message (user or bot)
     */
    @Override
    public void saveChatRecord(String userName, String message, ChatTypeEnum chatType) {
        ChatRecord chatRecord = new ChatRecord();
        chatRecord.setContent(message);
        chatRecord.setChatType(chatType.type);
        chatRecord.setFamilyMember(userName);
        chatRecord.setChatTime(LocalDateTime.now());
        chatRecordMapper.insert(chatRecord);
    }

}
















