package com.mario.nanjing.ai.config;

import com.mario.nanjing.ai.listener.TestChatModelListener;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.time.Duration;
import java.util.List;
import java.util.Objects;

/**
 * LLM configuration: exposes a {@link ChatModel} bean backed by Alibaba
 * DashScope's OpenAI-compatible endpoint (Qwen).
 *
 * @author MarioZzz
 * @create 2025-08-02 15:15
 * @Description: reference: https://docs.langchain4j.dev/tutorials/model-parameters/
 */
@Configuration
public class LLMConfig {

    /**
     * Builds the Qwen chat model client.
     *
     * <p>The API key is read from the {@code l4j_qwen_api} environment variable.
     * The bean fails fast at application startup with a clear message when the
     * variable is missing, instead of surfacing an opaque authentication error
     * on the first model call.
     *
     * @return a configured {@link ChatModel} targeting the {@code qwen-plus} model
     * @throws NullPointerException if the {@code l4j_qwen_api} environment variable is not set
     */
    @Bean
    public ChatModel chatModelQwen() {
        // Fail fast on a missing key rather than deferring the failure to the
        // first request against the model endpoint.
        String apiKey = Objects.requireNonNull(
                System.getenv("l4j_qwen_api"),
                "Environment variable 'l4j_qwen_api' is not set (DashScope API key)");

        return OpenAiChatModel.builder()
                .apiKey(apiKey)
                .modelName("qwen-plus")
                .baseUrl("https://dashscope.aliyuncs.com/compatible-mode/v1")
                .logRequests(true)   // effective only when the log level is DEBUG
                .logResponses(true)  // effective only when the log level is DEBUG
                .listeners(List.of(new TestChatModelListener()))
                .maxRetries(3)
                // Request is aborted with "request timed out" if no response arrives
                // within this window. NOTE(review): 5s is tight for LLM responses —
                // consider raising it if timeouts are observed in practice.
                .timeout(Duration.ofSeconds(5))
                .build();
    }
}
