package com.joker.demo.config;


import com.joker.demo.service.ChatAssistant;
import com.joker.demo.service.ChatMemoryAssistant;
import com.joker.demo.service.ToolAssistant;
import com.joker.demo.service.handler.InvoiceHandler;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.chat.request.json.JsonObjectSchema;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenCountEstimator;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.tool.ToolExecutor;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.qdrant.QdrantEmbeddingStore;
import io.qdrant.client.QdrantClient;
import io.qdrant.client.QdrantGrpcClient;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

import java.time.Duration;
import java.util.Map;

/**
 * Central LangChain4j wiring: chat models (blocking and streaming), chat-memory
 * assistants, tool-calling assistants, and the Qdrant-backed RAG pipeline.
 *
 * <p>All model beans share one OpenAI-compatible endpoint configured via
 * {@code langchain4j.open-ai.chat-model.*} properties.
 *
 * @author : feixiang.li
 * @since : 2025-09-25 14:51
 */
@Slf4j
@Configuration
public class LLMConfig {

    /**
     * Qdrant endpoint shared by {@link #qdrantClient()} and {@link #embeddingStore()};
     * kept in one place so the two beans cannot drift apart. 6334 is Qdrant's gRPC port.
     */
    private static final String QDRANT_HOST = "106.12.75.108";
    private static final int QDRANT_GRPC_PORT = 6334;

    /** Request timeout applied uniformly to every model built in this class. */
    private static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(60);

    @Value("${langchain4j.open-ai.chat-model.api-key}")
    private String apiKey;

    @Value("${langchain4j.open-ai.chat-model.base-url}")
    private String apiUrl;

    /**
     * Default chat model (qwen-flash). {@code @Primary}, so unqualified
     * {@link ChatModel} injection points receive this bean.
     *
     * @return the default blocking chat model
     */
    @Bean("grok")
    @Primary
    public ChatModel grok() {
        return OpenAiChatModel.builder()
                .apiKey(apiKey)
                .baseUrl(apiUrl)
                .modelName("qwen-flash")
                .timeout(DEFAULT_TIMEOUT)
                .temperature(0.7)
                .logRequests(true)
                .logResponses(true)
                .build();
    }

    /**
     * qwen3-max chat model; inject with qualifier {@code "qwen"}.
     *
     * @return a blocking chat model backed by qwen3-max
     */
    @Bean("qwen")
    public ChatModel qwen() {
        return OpenAiChatModel.builder()
                .apiKey(apiKey)
                .baseUrl(apiUrl)
                .modelName("qwen3-max")
                .timeout(DEFAULT_TIMEOUT)
                .temperature(0.7)
                .logRequests(true)
                .logResponses(true)
                .build();
    }

    /**
     * Long-context DeepSeek model; inject with qualifier {@code "long"}.
     *
     * @return a blocking chat model backed by deepseek-v3.1-terminus
     */
    @Bean("long")
    public ChatModel chatModelLong() {
        return OpenAiChatModel.builder()
                .apiKey(apiKey)
                .baseUrl(apiUrl)
                .modelName("deepseek/deepseek-v3.1-terminus")
                .timeout(DEFAULT_TIMEOUT)
                .temperature(0.7)
                .logRequests(true)
                .logResponses(true)
                .build();
    }

    /**
     * Assistant keeping a per-conversation sliding window of at most 10 messages.
     *
     * <p>NOTE: the direct {@code chatModelLong()} call resolves to the singleton
     * bean only because {@code @Configuration} classes are CGLIB-proxied by
     * default ({@code proxyBeanMethods = true}); do not copy this pattern into a
     * {@code proxyBeanMethods = false} configuration.
     *
     * @return assistant whose memory is keyed by the caller-supplied memory id
     */
    @Bean("chatMemoryWithMaxMessage")
    public ChatMemoryAssistant chatMemoryWithMaxMessage() {
        return AiServices.builder(ChatMemoryAssistant.class)
                .chatModel(chatModelLong())
                .chatMemoryProvider(memoryId -> MessageWindowChatMemory.builder()
                        .id(memoryId)
                        .maxMessages(10)
                        .build())
                .build();
    }

    /**
     * Assistant keeping a per-conversation token budget of 1000 tokens.
     *
     * <p>Tokens are counted with the gpt-4 tokenizer — an approximation, since
     * the underlying model is DeepSeek; the window size is therefore inexact.
     *
     * @return assistant whose memory is trimmed by estimated token count
     */
    @Bean("chatTokenWindowChatMemory")
    public ChatMemoryAssistant chatTokenWindowChatMemory() {
        OpenAiTokenCountEstimator tokenCountEstimator = new OpenAiTokenCountEstimator("gpt-4");
        return AiServices.builder(ChatMemoryAssistant.class)
                .chatModel(chatModelLong())
                .chatMemoryProvider(memoryId -> TokenWindowChatMemory.builder()
                        .id(memoryId)
                        .maxTokens(1000, tokenCountEstimator)
                        .build())
                .build();
    }

    /**
     * Streaming variant of the qwen3-max chat model, for token-by-token responses.
     *
     * @return the streaming chat model
     */
    @Bean
    public StreamingChatModel streamingChatModel() {
        return OpenAiStreamingChatModel.builder()
                .apiKey(apiKey)
                .baseUrl(apiUrl)
                .modelName("qwen3-max")
                .timeout(DEFAULT_TIMEOUT)
                .temperature(0.7)
                .logRequests(true)
                .logResponses(true)
                .build();
    }

    /**
     * Low-level tool-calling assistant: the invoice tool is declared with an
     * explicit {@link ToolSpecification} plus a {@link ToolExecutor} lambda.
     *
     * @param chatModel the primary chat model (injected)
     * @return assistant able to invoke the invoice-issuing tool
     */
    @Bean
    public ToolAssistant functionAssistant(ChatModel chatModel) {
        // FIX: OpenAI-compatible APIs require tool names to match
        // ^[a-zA-Z0-9_-]{1,64}$ — the previous Chinese name ("开具发票助手")
        // is rejected at request time. The human-readable text now lives in
        // the description, which has no such restriction.
        ToolSpecification toolSpecification = ToolSpecification.builder()
                .name("issueInvoice")
                .description("开具发票助手：根据用户提交的开票信息，开具发票")
                .parameters(JsonObjectSchema.builder()
                        .addStringProperty("companyName", "公司名称")
                        .addStringProperty("taxNo", "税号")
                        .addStringProperty("money", "金额")
                        .build())
                .build();

        // Second executor argument is the memory id of the conversation that
        // triggered the tool call.
        ToolExecutor toolExecutor = (ToolExecutionRequest toolExecutionRequest, Object memoryId) -> {
            log.info("执行工具:{}", toolExecutionRequest);
            log.info("参数:{}", toolExecutionRequest.arguments());
            log.info("参数:{}", memoryId);
            return "开具发票成功";
        };

        return AiServices.builder(ToolAssistant.class)
                .chatModel(chatModel)
                .tools(Map.of(toolSpecification, toolExecutor))
                .build();
    }

    /**
     * High-level tool-calling assistant: tool specifications are derived from
     * the {@code @Tool}-annotated methods of {@link InvoiceHandler}.
     * {@code @Primary}, so unqualified {@link ToolAssistant} injections get this bean.
     *
     * @param chatModel the primary chat model (injected)
     * @return assistant with annotation-discovered tools
     */
    @Bean("highApi")
    @Primary
    public ToolAssistant highApi(ChatModel chatModel) {
        return AiServices.builder(ToolAssistant.class)
                .chatModel(chatModel)
                .tools(new InvoiceHandler())
                .build();
    }

    /**
     * Raw gRPC client for Qdrant administration/queries (TLS disabled).
     *
     * @return the Qdrant gRPC client
     */
    @Bean
    public QdrantClient qdrantClient() {
        return new QdrantClient(
                QdrantGrpcClient.newBuilder(QDRANT_HOST, QDRANT_GRPC_PORT, false).build());
    }

    /**
     * Embedding model (text-embedding-v4) served by the same OpenAI-compatible endpoint.
     *
     * @return the embedding model
     */
    @Bean
    public EmbeddingModel embeddingModel() {
        return OpenAiEmbeddingModel.builder()
                .apiKey(apiKey)
                .baseUrl(apiUrl)
                .modelName("text-embedding-v4")
                .timeout(DEFAULT_TIMEOUT)
                .build();
    }

    /**
     * Qdrant-backed vector store over the {@code "my-qiniu"} collection,
     * pointed at the same endpoint as {@link #qdrantClient()}.
     *
     * @return the embedding store
     */
    @Bean
    public EmbeddingStore<TextSegment> embeddingStore() {
        return QdrantEmbeddingStore.builder()
                .host(QDRANT_HOST)
                .port(QDRANT_GRPC_PORT)
                .collectionName("my-qiniu")
                .build();
    }

    /**
     * RAG assistant: augments each prompt with the top-3 retrieved segments
     * scoring at least 0.7, and keeps a 10-message conversation window.
     *
     * @param chatModel      chat model used for generation
     * @param embeddingStore vector store to retrieve context from
     * @param embeddingModel model used to embed the user query for retrieval
     * @return the retrieval-augmented chat assistant
     */
    @Bean
    public ChatAssistant assistant(ChatModel chatModel, EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
        EmbeddingStoreContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
                .maxResults(3)
                .minScore(0.7)
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .build();
        return AiServices.builder(ChatAssistant.class)
                .chatModel(chatModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .contentRetriever(contentRetriever)
                .build();
    }

}
