package org.gwh.airagknowledge.core.llm.impl;

import dev.langchain4j.model.localai.LocalAiChatModel;
import lombok.extern.slf4j.Slf4j;
import org.gwh.airagknowledge.core.llm.LlmService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.time.Duration;
import java.util.Objects;

/**
 * {@link LlmService} implementation backed by a locally hosted Ollama model,
 * accessed through the OpenAI-compatible {@link LocalAiChatModel} client.
 *
 * <p>Thread-safety: {@link #setTemperature(double)} rebuilds the underlying
 * model instance, so both {@code model} and {@code temperature} are declared
 * {@code volatile} to make the swap visible to concurrent request threads.
 */
@Slf4j
@Service
public class OllamaLlmService implements LlmService {

    /** Fallback reply returned to the caller when the model call fails. */
    private static final String ERROR_ANSWER =
            "Sorry, I encountered an error while trying to answer your question.";

    /** Per-request timeout for a single chat completion. */
    private static final Duration REQUEST_TIMEOUT = Duration.ofSeconds(180);

    // volatile: setTemperature() replaces this instance while generateAnswer()
    // may be reading it from another thread.
    private volatile LocalAiChatModel model;

    private final String promptTemplate;
    private final String baseUrl;
    private final String modelName;

    // volatile: written by setTemperature(), read by initializeModel().
    private volatile double temperature = 0.1;

    /**
     * Creates the service and eagerly builds the chat-model client.
     *
     * @param baseUrl        base URL of the local Ollama endpoint
     * @param modelName      name of the model to invoke
     * @param promptTemplate template containing {@code {context}} and
     *                       {@code {question}} placeholders
     */
    public OllamaLlmService(
            @Value("${llm.ollama.base-url}") String baseUrl,
            @Value("${llm.ollama.model-name}") String modelName,
            @Value("${llm.prompt.template}") String promptTemplate) {
        this.baseUrl = baseUrl;
        this.modelName = modelName;
        this.promptTemplate = promptTemplate;

        // Build the local LLM client up front so misconfiguration fails fast.
        initializeModel();

        log.info("Ollama LLM service initialized with model: {} at {}", modelName, baseUrl);
    }

    /** (Re)builds the chat-model client from the current configuration. */
    private void initializeModel() {
        this.model = LocalAiChatModel.builder()
                .baseUrl(baseUrl)
                .modelName(modelName)
                .temperature(temperature)
                .timeout(REQUEST_TIMEOUT)
                .build();
    }

    /**
     * Generates an answer by substituting the question and retrieved context
     * into the configured prompt template and invoking the model.
     *
     * <p>Never throws: any failure is logged and a fixed apology message is
     * returned instead, so callers always receive a displayable string.
     *
     * @param question user question; {@code null} is treated as empty
     * @param context  retrieved context; {@code null} is treated as empty
     * @return the model's answer, or a fallback message on error
     */
    @Override
    public String generateAnswer(String question, String context) {
        try {
            log.info("Generating answer with Ollama for question: {}", question);

            // Fill the prompt template. Null inputs are replaced with "" so a
            // missing context degrades gracefully instead of triggering an NPE
            // that the catch below would silently convert into the fallback.
            String prompt = promptTemplate
                    .replace("{context}", Objects.requireNonNullElse(context, ""))
                    .replace("{question}", Objects.requireNonNullElse(question, ""));

            // Invoke the LLM.
            String answer = model.generate(prompt);
            log.info("Generated answer of length: {}", answer.length());

            return answer;
        } catch (Exception e) {
            log.error("Error generating answer with Ollama", e);
            return ERROR_ANSWER;
        }
    }

    /**
     * Updates the sampling temperature and rebuilds the model client so the
     * new setting takes effect on subsequent calls.
     *
     * @param temperature new sampling temperature
     */
    @Override
    public void setTemperature(double temperature) {
        this.temperature = temperature;
        // Rebuild the client — temperature is fixed at build time.
        initializeModel();
        log.info("Updated temperature to {} for Ollama model", temperature);
    }

    /** @return identifier of the form {@code "ollama:<modelName>"} */
    @Override
    public String getModelName() {
        return "ollama:" + modelName;
    }
}