package com.abc.ollama.service.impl;

import com.abc.ollama.entity.Embedding;
import com.abc.ollama.utils.OllamaUtils;
import com.abc.ollama.utils.WordUtil;
import com.abc.ollama.vo.*;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.micrometer.core.instrument.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Flux;
import org.springframework.http.MediaType;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

@Service
public class OllamaClientService {

    private static final Logger log = LoggerFactory.getLogger(OllamaClientService.class);

    /** OpenAI-compatible base URL of the SiliconFlow (DeepSeek) API. */
    private static final String SILICON_BASE_URL = "https://api.siliconflow.cn/v1/";

    /** Client for the local Ollama server; base URL injected via configuration. */
    private final WebClient webClient;

    /**
     * Client for the remote SiliconFlow/DeepSeek API.
     * Built once instead of per request (WebClient is immutable and thread-safe).
     */
    private final WebClient siliconClient = WebClient.builder()
            .baseUrl(SILICON_BASE_URL)
            .build();

    @Value("${spring.ai.ollama.chat.model}")
    private String chatModel;

    @Value("${spring.ai.ollama.embedding.model}")
    private String embeddingModel;

    @Value("${spring.ai.deepseek.model}")
    private String deepseekModel;

    @Value("${silicon.api-key}")
    private String apiKey;

    // Maximum number of context chunks retrieved for a RAG prompt.
    final int topN = 3;
    // Upper bound (in characters) on the total retrieved context length.
    final int MAX_CONTEXT_LEN = 8192;

    @Autowired
    private EmbeddingService embeddingService;

    public OllamaClientService(@Value("${spring.ai.ollama.base-url}") String ollamaBaseUrl, WebClient.Builder webClientBuilder) {
        this.webClient = webClientBuilder
                .baseUrl(ollamaBaseUrl)
                .build();
    }

    /**
     * Blocking (non-streaming) chat completion against the DeepSeek model via SiliconFlow.
     *
     * @param chatRequest request whose messages are forwarded; the model field is overwritten
     * @return the first choice's content, or "" when the response is empty or the call fails
     */
    public String deepseekChatNotStream(OllamaChatRequest chatRequest) {
        chatRequest.setModel(deepseekModel);
        try {
            return siliconClient.post()
                    .uri("/chat/completions")
                    .contentType(MediaType.APPLICATION_JSON)
                    .accept(MediaType.APPLICATION_JSON)
                    .header("Authorization", "Bearer " + apiKey)
                    .bodyValue(chatRequest)
                    .retrieve()
                    .bodyToMono(ChatCompletionResponse.class)
                    .map(response -> {
                        if (response.getChoices() == null || response.getChoices().isEmpty()) {
                            return "";
                        }
                        ChatCompletionResponse.Message message = response.getChoices().get(0).getMessage();
                        // Guard against a choice carrying no message payload.
                        return message == null ? "" : message.getContent();
                    })
                    .block(); // block for the synchronous result
        } catch (Exception e) {
            // Log the full stack trace (not just the message) so failures are diagnosable.
            log.error("Error calling DeepSeek API", e);
            return "";
        }
    }

    /**
     * Streaming chat completion against the DeepSeek model via SiliconFlow.
     * Emits the raw SSE payload strings exactly as received (no client-side parsing).
     *
     * @param chatRequest request whose messages are forwarded; model and stream flag are overwritten
     * @return a flux of raw event-stream chunks
     */
    public Flux<String> deepseekChat(OllamaChatRequest chatRequest) {
        chatRequest.setModel(deepseekModel);
        chatRequest.setStream(true);
        return siliconClient.post()
                .uri("/chat/completions")
                .contentType(MediaType.APPLICATION_JSON)
                .accept(MediaType.TEXT_EVENT_STREAM)
                .header("Authorization", "Bearer " + apiKey)
                .bodyValue(chatRequest)
                .retrieve()
                .bodyToFlux(String.class);
    }

    /**
     * Streams token-by-token output from the local Ollama /api/generate endpoint.
     *
     * @param ollamaGenerateRequest generation request; the model field is overwritten
     * @return a flux of response fragments
     */
    public Flux<String> streamGenerate(OllamaGenerateRequest ollamaGenerateRequest) {
        ollamaGenerateRequest.setModel(chatModel);
        return webClient.post()
                .uri("/api/generate")
                .contentType(MediaType.APPLICATION_JSON)
                .accept(MediaType.TEXT_EVENT_STREAM)
                .bodyValue(ollamaGenerateRequest)
                .retrieve()
                .bodyToFlux(OllamaGenerateResponse.class)
                .doOnNext(response -> log.debug("Received response: {}", response.getResponse()))
                .map(OllamaGenerateResponse::getResponse);
    }

    /**
     * Streams a chat response from the local Ollama /api/chat endpoint.
     *
     * @param chatRequest chat request; model, options and stream flag are overwritten
     * @return a flux of message-content fragments (chunks without a message are skipped)
     */
    public Flux<String> streamChatResponse(OllamaChatRequest chatRequest) {
        chatRequest.setModel(chatModel);
        chatRequest.setOptions(new OllamaOptions(0.7));
        chatRequest.setStream(true);
        return webClient.post()
                .uri("/api/chat")
                .contentType(MediaType.APPLICATION_JSON)
                .accept(MediaType.TEXT_EVENT_STREAM)
                .bodyValue(chatRequest)
                .retrieve()
                .bodyToFlux(OllamaChatResponse.class)
                .filter(response -> response.getMessage() != null) // skip chunks with no message
                .map(response -> response.getMessage().getContent());
    }

    /**
     * Blocking (non-streaming) chat against the local Ollama /api/chat endpoint.
     *
     * @param chatRequest chat request; model and stream flag are overwritten
     * @return the assistant message content, or "" when the response carries no message
     */
    public String stringChatResponse(OllamaChatRequest chatRequest) {
        chatRequest.setModel(chatModel);
        chatRequest.setStream(false);
        return webClient.post()
                .uri("/api/chat")
                .contentType(MediaType.APPLICATION_JSON)
                .accept(MediaType.APPLICATION_JSON)
                .bodyValue(chatRequest)
                .retrieve()
                .bodyToMono(OllamaChatResponse.class)
                .map(response -> response.getMessage() == null ? "" : response.getMessage().getContent())
                .block();
    }

    /**
     * Computes an embedding vector for the request prompt via Ollama /api/embeddings.
     *
     * @param ollamaEmbeddingRequest embedding request; the model field is overwritten
     * @return the embedding response, possibly null if the call produced no body
     */
    public OllamaEmbeddingResponse embeddings(OllamaEmbeddingRequest ollamaEmbeddingRequest) {
        ollamaEmbeddingRequest.setModel(embeddingModel);
        return webClient.post()
                .uri("/api/embeddings")
                .contentType(MediaType.APPLICATION_JSON)
                .accept(MediaType.APPLICATION_JSON)
                .bodyValue(ollamaEmbeddingRequest)
                .retrieve()
                .bodyToMono(OllamaEmbeddingResponse.class)
                .block();
    }

    /**
     * Embeds the request prompt and persists it as an {@link Embedding} row.
     * The vector is stored as a comma-separated string of doubles.
     *
     * @param ollamaEmbeddingRequest request carrying the text to embed
     * @param fileName               logical file id the chunk belongs to
     * @throws IllegalArgumentException when the prompt is blank
     */
    public void saveEmbeddings(OllamaEmbeddingRequest ollamaEmbeddingRequest, String fileName) {
        if (StringUtils.isBlank(ollamaEmbeddingRequest.getPrompt())) {
            throw new IllegalArgumentException("prompt is null");
        }
        Embedding embedding = new Embedding();
        embedding.setId(UUID.randomUUID().toString());
        embedding.setContent(ollamaEmbeddingRequest.getPrompt());
        embedding.setFileId(fileName);

        // embeddings() blocks on a Mono and may legitimately return null.
        OllamaEmbeddingResponse response = embeddings(ollamaEmbeddingRequest);
        List<Double> embeddingList = response == null ? null : response.getEmbedding();
        if (embeddingList != null) {
            // Serialize the vector as a comma-separated string for storage.
            String vector = embeddingList.stream()
                    .map(String::valueOf)
                    .collect(Collectors.joining(","));
            embedding.setVector(vector);
        } else {
            log.warn("Embedding service returned no vector for file {}", fileName);
            embedding.setVector(null);
        }
        embeddingService.save(embedding);
    }

    /**
     * Returns all stored embeddings, never null.
     */
    public List<Embedding> getAllEmbeddings() {
        List<Embedding> all = embeddingService.list();
        return all == null ? List.of() : all;
    }

    /**
     * Returns one embedding per distinct fileId (demo helper for listing uploaded files).
     * Never returns null.
     */
    public List<Embedding> getAllFiles() {
        List<Embedding> allEmbeddings = getAllEmbeddings();
        if (allEmbeddings.isEmpty()) {
            return List.of();
        }
        // Deduplicate by fileId, keeping the first embedding seen for each file.
        return allEmbeddings.stream()
                .collect(Collectors.collectingAndThen(
                        Collectors.toMap(Embedding::getFileId, embedding -> embedding, (existing, replacement) -> existing),
                        map -> new ArrayList<>(map.values())
                ));
    }

    /**
     * RAG chat over the local knowledge base:
     * 1. embed the user's question,
     * 2. retrieve the most similar stored chunks,
     * 3. build a context-grounded prompt and stream the DeepSeek answer.
     *
     * @param chatRequest incoming conversation; only the latest message is used as the question
     * @return a streaming answer, or a fixed notice when the knowledge base is empty
     */
    public Flux<String> localKnowledgeChat(OllamaChatRequest chatRequest) {
        List<Embedding> allEmbeddings = getAllEmbeddings();
        if (allEmbeddings.isEmpty()) {
            return Flux.just("The local knowledge base is empty, please contact the administrator to supplement.");
        }

        // Use the latest message both for retrieval and as the final question.
        // (getStandaloneQuestion could rewrite multi-turn follow-ups; disabled for now.)
        String question = chatRequest.getMessages().get(chatRequest.getMessages().size() - 1).getContent();

        List<Embedding> topNEmbeddings = getRelatedDoc(question);
        String context = topNEmbeddings.stream()
                .map(Embedding::getContent)
                .collect(Collectors.joining("\n"));

        // Newline separates the retrieved context from the instruction; without it the
        // last context chunk ran straight into "You are a helpful AI assistant...".
        String prompt = "Context:  " + context + "\nYou are a helpful AI assistant from Bytesforce. Use the pieces of context above to answer the question at the end. " +
                "If the question is unrelated to the context, just kindly say you don't know, DO NOT try to make up an answer. " +
                "  Question: " + question;

        OllamaChatRequest lastChatRequest = new OllamaChatRequest();
        lastChatRequest.setStream(true);
        lastChatRequest.setMessages(List.of(new OllamaMessage("user", prompt)));

        log.debug("local knowledge chat request: {}", lastChatRequest);
        return deepseekChat(lastChatRequest);
    }

    /**
     * Reads a file, splits it into chunks, and embeds + persists every chunk.
     *
     * @param filePath path of the file to ingest
     * @param fileName logical file id stored with each chunk
     * @throws RuntimeException wrapping any IOException from reading the file
     */
    public void embeddingsFile(String filePath, String fileName) {
        try {
            String content = WordUtil.readFile(filePath);
            List<String> contentList = WordUtil.split(content);
            int totalLen = contentList.size();
            for (int i = 0; i < totalLen; i++) {
                OllamaEmbeddingRequest ollamaEmbeddingRequest = new OllamaEmbeddingRequest();
                ollamaEmbeddingRequest.setPrompt(contentList.get(i));
                saveEmbeddings(ollamaEmbeddingRequest, fileName);

                // Report ingestion progress per chunk.
                double progress = ((double) (i + 1) / totalLen) * 100;
                log.info("Progress: {}% ({}/{})", String.format("%.2f", progress), i + 1, totalLen);
            }
            log.info("Embeddings file upload completed.");
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Condenses a multi-turn conversation into a single standalone question by
     * asking the model to rephrase the latest message given the chat history.
     * Any {@code <think>...</think>} reasoning block is stripped from the answer.
     *
     * @param ollamaChatRequest the conversation so far
     * @return the standalone question, or the latest user message when the model call fails
     */
    public String getStandaloneQuestion(OllamaChatRequest ollamaChatRequest) {
        if (ollamaChatRequest.getMessages().size() <= 1) {
            // No history: the single message already is the standalone question.
            return ollamaChatRequest.getMessages().get(0).getContent();
        }

        // Render the conversation as "Human:/Assistant:" lines.
        String chatHistory = ollamaChatRequest.getMessages().stream()
                .map(message -> (message.getRole().equalsIgnoreCase("user") ? "Human: " : "Assistant: ") + message.getContent())
                .collect(Collectors.joining("\n"));

        String followUpInput = ollamaChatRequest.getMessages().get(ollamaChatRequest.getMessages().size() - 1).getContent();
        String content = String.format(
                "Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.\nChat History: %s\nFollow Up Input: %s Standalone question:\n",
                chatHistory,
                followUpInput);
        log.debug("build standalone request:\n{}", content);

        OllamaChatRequest standaloneRequest = new OllamaChatRequest();
        standaloneRequest.setMessages(List.of(new OllamaMessage("user", content)));
        String response = deepseekChatNotStream(standaloneRequest);
        log.debug("raw standalone response: {}", response);

        if (StringUtils.isNotBlank(response)) {
            // (?s) lets '.' match newlines; the previous "[^<]*" pattern failed whenever
            // the reasoning itself contained a '<'. Responses without <think> tags pass
            // through unchanged (the old code wrongly discarded them).
            return response.replaceAll("(?s)<think>.*?</think>", "").trim();
        }
        // Model call failed or returned nothing: fall back to the latest user message.
        return followUpInput;
    }

    /**
     * Deletes all stored embeddings belonging to the given file id.
     */
    public void deleteEmbeddingsByFileId(String fileId) {
        QueryWrapper<Embedding> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("file_id", fileId);
        embeddingService.remove(queryWrapper);
    }

    /**
     * Embeds the question and returns the most similar stored chunks,
     * limited by {@code topN} and {@code MAX_CONTEXT_LEN}.
     */
    public List<Embedding> getRelatedDoc(String question) {
        OllamaEmbeddingRequest ollamaEmbeddingRequest = new OllamaEmbeddingRequest();
        ollamaEmbeddingRequest.setPrompt(question);

        OllamaEmbeddingResponse embeddingResponse = embeddings(ollamaEmbeddingRequest);
        List<Double> userVector = embeddingResponse.getEmbedding();
        List<Embedding> allEmbeddings = getAllEmbeddings();
        List<Embedding> embeddingsWithSimilarity = calculateSimilarities(userVector, allEmbeddings);

        return getTopNEmbeddings(embeddingsWithSimilarity);
    }

    /**
     * Scores every embedding against the user vector (via OllamaUtils.calculate)
     * and returns them sorted by similarity, highest first.
     * Note: mutates the similarity field of the passed embeddings.
     */
    private List<Embedding> calculateSimilarities(List<Double> userVector, List<Embedding> allEmbeddings) {
        List<Embedding> scored = new ArrayList<>(allEmbeddings);
        // Explicit loop instead of Stream.peek: peek is meant for debugging and is
        // not guaranteed to execute for every element in all pipelines.
        for (Embedding embedding : scored) {
            List<Double> localVector = OllamaUtils.parseVector(embedding.getVector());
            embedding.setSimilarity(OllamaUtils.calculate(userVector, localVector));
        }
        scored.sort((e1, e2) -> Double.compare(e2.getSimilarity(), e1.getSimilarity()));
        return scored;
    }

    /**
     * Takes up to {@code topN} leading embeddings from an already-sorted list,
     * stopping early once the accumulated content length would exceed
     * {@code MAX_CONTEXT_LEN} characters.
     */
    private List<Embedding> getTopNEmbeddings(List<Embedding> embeddings) {
        int totalLength = 0;
        List<Embedding> topNEmbeddings = new ArrayList<>();

        for (int i = 0; i < Math.min(topN, embeddings.size()); i++) {
            Embedding embedding = embeddings.get(i);
            if (totalLength + embedding.getContent().length() > MAX_CONTEXT_LEN) {
                break;
            }
            topNEmbeddings.add(embedding);
            totalLength += embedding.getContent().length();
        }

        return topNEmbeddings;
    }
}
