package net.cyue.ort.llm;

import net.cyue.ort.llm.generator.GenerationConfig;
import net.cyue.ort.llm.template.ModelChatMessage;
import net.cyue.ort.llm.generator.GenerationCallback;
import net.cyue.ort.llm.generator.TextGenerator;
import net.cyue.ort.llm.util.TokenManager;
import net.cyue.ort.llm.util.PromptEngine;

import java.util.List;
import java.util.function.Consumer;

/**
 * Default LLM client implementation.
 * Provides a high-level API that encapsulates the underlying generation logic.
 */
public class DefaultLLMClient implements LLMClient {
    private final TextGenerator generator;
    private final PromptEngine prompts;
    private final TokenManager tokens;
    private final Runnable onClose;
    // When true, generated tokens are mirrored to stdout and errors to stderr.
    private boolean debug = false;

    /**
     * Creates a client wired to the given generation components.
     *
     * @param textGenerator low-level text generator that performs inference
     * @param promptEngine  renders chat messages into model prompts
     * @param tokenManager  token accounting helper (retained for use by the pipeline)
     * @param closeAction   optional cleanup hook invoked by {@link #close()}; may be null
     */
    public DefaultLLMClient(
        TextGenerator textGenerator,
        PromptEngine promptEngine,
        TokenManager tokenManager,
        Runnable closeAction
    ) {
        this.generator = textGenerator;
        this.prompts = promptEngine;
        this.tokens = tokenManager;
        this.onClose = closeAction;
    }

    /** Runs a plain blocking generation with no streaming callback. */
    @Override
    public String generate(String prompt, GenerationConfig config) {
        return generator.generate(prompt, config, null);
    }

    /**
     * Generates text, streaming each token to {@code callback} as it is produced.
     * A null callback degrades to a plain blocking generation.
     */
    @Override
    public String generateWithCallback(
        String prompt,
        GenerationConfig config,
        Consumer<String> callback
    ) {
        GenerationCallback adapter = (callback == null) ? null : adapt(callback);
        return generator.generate(prompt, config, adapter);
    }

    /**
     * Wraps a token consumer in a {@link GenerationCallback}, mirroring output
     * to the console when debug mode is enabled.
     */
    private GenerationCallback adapt(Consumer<String> callback) {
        return new GenerationCallback() {
            @Override
            public void onTokenGenerated(String token) {
                callback.accept(token);
                if (debug) {
                    System.out.print(token);
                }
            }

            @Override
            public void onComplete(String fullText) {
                // Terminate the streamed debug line.
                if (debug) {
                    System.out.println();
                }
            }

            @Override
            public void onError(Exception e) {
                if (debug) {
                    System.err.println("Error: " + e.getMessage());
                }
            }
        };
    }

    /** Renders the chat transcript into a prompt, then delegates to {@link #generate}. */
    @Override
    public String chat(List<ModelChatMessage> messages, GenerationConfig config) {
        String rendered = prompts.applyChatTemplate(messages);
        return generate(rendered, config);
    }

    /** Toggles console debug output; returns {@code this} for chaining. */
    @Override
    public LLMClient setDebug(boolean debug) {
        this.debug = debug;
        return this;
    }

    /** Exposes the prompt engine's chat-template rendering directly. */
    @Override
    public String applyChatTemplate(List<ModelChatMessage> messages) {
        return prompts.applyChatTemplate(messages);
    }

    /** Runs the configured cleanup hook, if any. */
    @Override
    public void close() {
        if (onClose != null) {
            onClose.run();
        }
    }
}

