package net.cyue.ort.llm.gui.chat;

import net.cyue.ort.llm.LLMClient;
import net.cyue.ort.llm.generator.GenerationConfig;
import net.cyue.ort.llm.gui.state.AppState;
import net.cyue.ort.llm.template.ModelChatMessage;
import net.cyue.util.StringUtil;

import java.util.Objects;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Coordinates user input, model inference and view updates, decoupling the UI
 * layer from the business logic.
 */
public class ChatController implements AutoCloseable {

    private final AppState appState;

    /**
     * Single daemon worker so at most one generation runs at a time and a
     * forgotten {@link #close()} cannot keep the JVM alive.
     */
    private final ExecutorService executor =
            Executors.newSingleThreadExecutor(r -> {
                Thread thread = new Thread(r, "chat-generator");
                thread.setDaemon(true);
                return thread;
            });

    /**
     * Written by {@link #attachView(ChatView)} on the caller's thread and read
     * by the executor thread in {@link #generateResponse(String)}; declared
     * {@code volatile} so the worker is guaranteed to observe the latest view.
     */
    private volatile ChatView view;

    /**
     * @param appState shared application state holding the model client,
     *                 generation config and chat history; must not be null
     */
    public ChatController(AppState appState) {
        this.appState = Objects.requireNonNull(appState, "appState");
    }

    /**
     * Attaches the view this controller renders into. Must be called before
     * {@link #handleUserInput(String)}.
     */
    public void attachView(ChatView view) {
        this.view = view;
    }

    /**
     * Validates a raw user message and, if a model is loaded and no other
     * generation is in flight, echoes it to the view and schedules inference
     * on the background executor.
     *
     * @param rawInput text as typed by the user; may be null or blank
     * @throws IllegalStateException if no view has been attached
     */
    public void handleUserInput(String rawInput) {
        if (view == null) {
            throw new IllegalStateException("ChatView 尚未初始化");
        }
        String input = rawInput == null ? "" : rawInput.trim();
        if (input.isEmpty()) {
            view.clearInput();
            return;
        }

        if (!appState.hasModelLoaded()) {
            view.showSystemMessage("请先选择并加载模型");
            view.clearInput();
            return;
        }

        // startGeneration() acts as a mutual-exclusion gate: only one request
        // may be in flight; the matching finishGeneration() runs in the
        // worker's finally block.
        if (!appState.startGeneration()) {
            view.showSystemMessage("正在生成上一个请求的响应，请稍候");
            return;
        }

        view.showUserMessage(input);
        view.clearInput();
        view.setInputEnabled(false);

        executor.submit(() -> generateResponse(input));
    }

    /**
     * Runs on the executor thread: appends the user turn to history, streams
     * the model response into the view, and commits or rolls back the history
     * depending on the outcome.
     *
     * <p>NOTE(review): all {@link ChatView} callbacks here are invoked from the
     * background thread — the view implementation is assumed to marshal onto
     * the UI thread itself; confirm that contract.
     */
    private void generateResponse(String userInput) {
        boolean assistantStarted = false;
        int userMessageIndex = -1;
        try {
            LLMClient llmClient = appState.getLlmClient()
                    .orElseThrow(() -> new IllegalStateException("模型尚未加载"));
            GenerationConfig generationConfig = appState.getGenerationConfig()
                    .orElseThrow(() -> new IllegalStateException("生成配置尚未加载"));

            view.beginAssistantResponse();
            assistantStarted = true;

            // Remember where the user turn lands so either rollback path can
            // remove exactly that entry.
            userMessageIndex = appState.getHistory().size();
            appState.getHistory().add(new ModelChatMessage(ModelChatMessage.Role.USER, userInput));

            String response = llmClient.generateWithCallback(
                    llmClient.applyChatTemplate(appState.getHistory()),
                    generationConfig,
                    chunk -> {
                        // Skip empty chunks so the view is not spammed with no-ops.
                        if (!StringUtil.isBlank(chunk)) {
                            view.appendAssistantResponse(chunk);
                        }
                    }
            );

            if (StringUtil.isBlank(response)) {
                // Nothing generated: drop the dangling user turn so history
                // never ends with an unanswered message.
                appState.getHistory().remove(userMessageIndex);
            } else {
                appState.getHistory().add(
                        new ModelChatMessage(ModelChatMessage.Role.ASSISTANT, response)
                );
            }
        } catch (Exception ex) {
            view.showSystemMessage("生成失败：" + ex.getMessage());
            // NOTE(review): consider routing this through a logger instead of
            // stderr once the project adopts one.
            ex.printStackTrace();
            // Roll back the user turn added above, if it got that far.
            if (userMessageIndex >= 0 && appState.getHistory().size() > userMessageIndex) {
                appState.getHistory().remove(userMessageIndex);
            }
        } finally {
            if (assistantStarted) {
                view.endAssistantResponse();
            }
            appState.finishGeneration();
            view.setInputEnabled(true);
        }
    }

    /** Stops the worker immediately; any in-flight generation is interrupted. */
    @Override
    public void close() {
        executor.shutdownNow();
    }
}

