package com.zx.lc.service;

import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.SystemMessage;
import dev.langchain4j.service.TokenStream;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.service.V;
import dev.langchain4j.service.spring.AiService;
import dev.langchain4j.service.spring.AiServiceWiringMode;

// AiService 声明AI服务，可以指定模型
// @AiService declares an AI service; the model and collaborators can be specified explicitly.
// EXPLICIT wiring mode: beans are looked up by the names given below rather than auto-discovered —
// "myChatMemoryProvider" supplies per-user chat memory, "ollamaStreamingChatModel" is the streaming model.
@AiService(wiringMode = AiServiceWiringMode.EXPLICIT,
        chatMemoryProvider = "myChatMemoryProvider",
        streamingChatModel = "ollamaStreamingChatModel")
public interface OllamaAssistant2 {

    /**
     * Streams an assistant reply for the given user, keeping a separate conversation
     * history per user.
     *
     * <p>Uses {@code @MemoryId} together with the configured {@code ChatMemoryProvider}
     * to maintain isolated chat memory for multiple users, and returns a streaming
     * response ({@link TokenStream}) instead of a complete message.
     *
     * @param memoryId    identifier of the conversation/user whose chat memory is used;
     *                    each distinct id gets its own history
     * @param userMessage the user's raw message text; it is interpolated into the
     *                    {@code @UserMessage} template via the {@code {{msg}}} variable
     * @return a {@link TokenStream} that emits the model's reply token by token
     */
    @SystemMessage("你是我的私人管家")
    TokenStream stream(@MemoryId int memoryId,
            @UserMessage("我兴奋的跟你说：{{msg}}") @V("msg") String userMessage);
}
