package com.tom.learnbase.two;

import com.tom.contants.Contants;
import dev.langchain4j.chain.ConversationalChain;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import org.junit.jupiter.api.Test;
import org.junit.platform.commons.logging.Logger;
import org.junit.platform.commons.logging.LoggerFactory;
import org.mapdb.DB;
import org.mapdb.DBMaker;

import java.util.List;
import java.util.Map;

import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson;
import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson;
import static dev.langchain4j.data.message.UserMessage.userMessage;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static org.mapdb.Serializer.INTEGER;
import static org.mapdb.Serializer.STRING;

/**
 * 相比手动维护和管理 ChatMessage，更好的选择是 ChatMemory。
 * ChatMemory 充当 ChatMessages 的容器（由 List 支持），具有持久性（请参阅 ChatMemoryStore）等附加功能和"驱逐策略"等机制。
 * LLMs 有上下文 tokens 数量限制，而且会根据 tokens 数量收取费用。
 *
 * LangChain4j 实现了两种驱逐策略算法（SystemMessage 总是被保留）:
 *  1.MessageWindowChatMemory : 保留最近的 N 条 message，并清除不再适合的旧消息
 *  2.TokenWindowChatMemory : 保留最近的 N 个 token，它需要一个 Tokenizer 来计算每个 ChatMessage 中的 token 数
 *
 */
public class ChatMemoryTest {

    /** Minimal single-user assistant contract; memory is attached via AiServices. */
    interface Assistant {

        String chat(String message);
    }

    /**
     * MessageWindowChatMemory: keeps the N most recent messages and evicts older
     * ones that no longer fit. Here only the 2 most recent messages are retained,
     * which is still enough for the follow-up question because the user's name
     * was mentioned in the immediately preceding exchange.
     */
    @Test
    public void MessageWindowChatMemoryTest(){
        // Keep only the two most recent messages.
        MessageWindowChatMemory messageWindowChatMemory = MessageWindowChatMemory.withMaxMessages(2);
        Assistant assistant = AiServices.builder(Assistant.class)
                .chatMemory(messageWindowChatMemory)
                .chatLanguageModel(Contants.openAiChatModel)
                .build();

        String answer = assistant.chat("Hello! My name is Klaus.");
        System.out.println(answer); // Hello Klaus! How can I assist you today?

        String answerWithName = assistant.chat("What is my name?");
        System.out.println(answerWithName); // Your name is Klaus.

    }


    /** Multi-user assistant: @MemoryId routes each call to its own chat memory. */
    interface AssistantUser {

        String chat(@MemoryId int memoryId, @UserMessage String userMessage);
    }


    /**
     * Per-user memory isolation: the ChatMemoryProvider lambda creates a separate
     * MessageWindowChatMemory (max 10 messages) for each distinct memoryId, so
     * user 1 and user 2 do not see each other's conversation.
     */
    @Test
    public void ServiceWithMemoryForEachUserTest(){
        AssistantUser assistant = AiServices.builder(AssistantUser.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(Contants.key))
                .chatMemoryProvider(memoryId -> MessageWindowChatMemory.withMaxMessages(10))
                .build();

        System.out.println(assistant.chat(1, "Hello, my name is Klaus"));
        // Hi Klaus! How can I assist you today?

        System.out.println(assistant.chat(2, "Hello, my name is Francine"));
        // Hello Francine! How can I assist you today?

        System.out.println(assistant.chat(1, "What is my name?"));
        // Your name is Klaus.

        System.out.println(assistant.chat(2, "What is my name?"));
        // Your name is Francine.
    }


    /**
     * Persists the conversation to disk via a custom ChatMemoryStore while
     * keeping the 10 most recent messages in the window.
     */
    @Test
    public void ServiceWithPersistentMemoryTest(){
        ChatMemory chatMemory = MessageWindowChatMemory.builder()
                .maxMessages(10)
                .chatMemoryStore(new PersistentChatMemoryStore())
                .build();

        Assistant assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(Contants.key))
                .chatMemory(chatMemory)
                .build();

        String answer = assistant.chat("Hello! My name is Klaus.");
        System.out.println(answer); // Hello Klaus! How can I assist you today?

        // Now, comment out the two lines above, uncomment the two lines below, and run again.

        // String answerWithName = assistant.chat("What is my name?");
        // System.out.println(answerWithName); // Your name is Klaus.
    }


    /**
     * Single-user persistent store backed by MapDB (an embedded Java key-value
     * store). Messages are serialized to JSON and kept in a String->String map
     * inside "chat-memory.db"; each write is followed by an explicit commit
     * because the DB is opened with transactions enabled.
     */
    static class PersistentChatMemoryStore implements ChatMemoryStore {

        // NOTE(review): the DB handle is never closed; acceptable for a demo,
        // but a real store should expose a close()/shutdown hook.
        private final DB db = DBMaker.fileDB("chat-memory.db").transactionEnable().make();
        private final Map<String, String> map = db.hashMap("messages", STRING, STRING).createOrOpen();

        @Override
        public List<ChatMessage> getMessages(Object memoryId) {
            String json = map.get((String) memoryId);
            // messagesFromJson is expected to yield an empty list for a null/absent
            // entry — TODO confirm against the langchain4j version in use.
            return messagesFromJson(json);
        }

        @Override
        public void updateMessages(Object memoryId, List<ChatMessage> messages) {
            String json = messagesToJson(messages);
            map.put((String) memoryId, json);
            db.commit(); // flush the transaction so the file survives a crash
        }

        @Override
        public void deleteMessages(Object memoryId) {
            map.remove((String) memoryId);
            db.commit();
        }
    }


    /**
     * Combines both ideas above: a per-user memory (via ChatMemoryProvider)
     * whose contents are persisted in a shared MapDB-backed store keyed by the
     * integer memoryId.
     */
    @Test
    public void ServiceWithPersistentMemoryForEachUserTest(){
        PersistentEachChatMemoryStore store = new PersistentEachChatMemoryStore();

        // One window (max 10 messages) per memoryId, all sharing the same store.
        ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder()
                .id(memoryId)
                .maxMessages(10)
                .chatMemoryStore(store)
                .build();

        AssistantUser assistant = AiServices.builder(AssistantUser.class)
                .chatLanguageModel(OpenAiChatModel.withApiKey(Contants.key))
                .chatMemoryProvider(chatMemoryProvider)
                .build();

        System.out.println(assistant.chat(1, "Hello, my name is Klaus"));
        System.out.println(assistant.chat(2, "Hi, my name is Francine"));

        // Now, comment out the two lines above, uncomment the two lines below, and run again.

        // System.out.println(assistant.chat(1, "What is my name?"));
        // System.out.println(assistant.chat(2, "What is my name?"));
    }


    /**
     * Multi-user persistent store: same JSON-in-MapDB approach as
     * PersistentChatMemoryStore but keyed by the int memoryId, in its own file.
     */
    static class PersistentEachChatMemoryStore implements ChatMemoryStore {

        // NOTE(review): DB handle is never closed — fine for a demo only.
        private final DB db = DBMaker.fileDB("multi-user-chat-memory.db").transactionEnable().make();
        private final Map<Integer, String> map = db.hashMap("messages", INTEGER, STRING).createOrOpen();

        @Override
        public List<ChatMessage> getMessages(Object memoryId) {
            String json = map.get((int) memoryId);
            return messagesFromJson(json);
        }

        @Override
        public void updateMessages(Object memoryId, List<ChatMessage> messages) {
            String json = messagesToJson(messages);
            map.put((int) memoryId, json);
            db.commit();
        }

        @Override
        public void deleteMessages(Object memoryId) {
            map.remove((int) memoryId);
            db.commit();
        }
    }


    /**
     * ConversationalChain: a ready-made chain that wires a chat model to a
     * default chat memory, so follow-up questions keep the conversation context.
     */
    @Test
    public void ConversationalChainTest(){
        ConversationalChain chain = ConversationalChain.builder()
                .chatLanguageModel(OpenAiChatModel.withApiKey(Contants.key))
                // .chatMemory() // you can override default chat memory
                .build();

        String answer = chain.execute("Hello, my name is Klaus");
        System.out.println(answer); // Hello Klaus! How can I assist you today?

        String answerWithName = chain.execute("What is my name?");
        System.out.println(answerWithName); // Your name is Klaus.
    }


    /**
     * TokenWindowChatMemory managed by hand: the memory keeps at most 300
     * tokens (counted with the GPT-3.5-turbo tokenizer) and the test drives
     * the model directly, deciding itself which messages enter the memory.
     */
    @Test
    public void ConversationalChainTokenWindowChatMemoryTest(){
        ChatLanguageModel model = OpenAiChatModel.withApiKey(Contants.key);

        ChatMemory chatMemory = TokenWindowChatMemory.withMaxTokens(300, new OpenAiTokenizer(GPT_3_5_TURBO));

        // You have full control over the chat memory.
        // You can decide if you want to add a particular message to the memory
        // (e.g. you might not want to store few-shot examples to save on tokens).
        // You can process/modify the message before saving if required.

        chatMemory.add(userMessage("Hello, my name is Klaus"));
        AiMessage answer = model.generate(chatMemory.messages()).content();
        System.out.println(answer.text()); // Hello Klaus! How can I assist you today?
        chatMemory.add(answer);

        chatMemory.add(userMessage("What is my name?"));
        AiMessage answerWithName = model.generate(chatMemory.messages()).content();
        System.out.println(answerWithName.text()); // Your name is Klaus.
        chatMemory.add(answerWithName);

        // Dump whatever survived the token-window eviction.
        System.out.println("-------------------------");
        chatMemory.messages().forEach(chatMessage -> System.out.println(chatMessage.text()));
        System.out.println("-------------------------");
    }


}
