import { LLM } from "llama-node";
import { LLamaCpp } from "llama-node/dist/llm/llama-cpp.js";
import path from "path";
import { fileURLToPath } from "url";

// Recreate __filename/__dirname, which do not exist in ES modules.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Instantiate the llama-node wrapper with the llama.cpp backend.
const llama = new LLM(LLamaCpp);

// Absolute path to the GGML quantized model weights.
// NOTE(review): assumes the file lives two directories up under resource/ — verify deployment layout.
const modelPath = path.join(__dirname,"../../resource", "llama-2-7b.ggmlv3.q4_0.bin");

// Load-time configuration passed to llama.load() below.
// Field semantics come from llama-node's llama.cpp LoadConfig; only
// modelPath/enableLogging/nCtx/nGpuLayers are non-default-looking here.
const config = {
  modelPath,
  enableLogging: true, // surface backend log output to the console
  nCtx: 1024, // context window size — presumably tokens; confirm against llama-node docs
  seed: 0,
  f16Kv: false,
  logitsAll: false,
  vocabOnly: false,
  useMlock: false,
  embedding: false,
  useMmap: true,
  nGpuLayers: 0, // CPU-only inference
};

// Load the model weights into memory.
// Returns true on success, false on failure (the error is logged, not thrown).
async function loadModel() {
  console.log("⏳ Loading model...");
  let loaded = false;
  try {
    await llama.load(config);
    loaded = true;
  } catch (err) {
    console.error("❌ Failed to load model:", err);
  }
  if (loaded) {
    console.log("✅ Model loaded successfully!");
  }
  return loaded;
}

// Generate an answer for one question by streaming tokens from the model.
//
// Resolves with the trimmed answer text, or with a timeout notice after 30s.
// Rejects with an Error (original failure attached as `cause`) only when the
// completion call itself fails.
//
// Fixes vs. previous version:
// - No async Promise executor (rejections inside one can be silently lost);
//   the callback-style API is adapted with a plain executor + .then/.catch.
// - Rejects with an Error object instead of a bare string.
// - The token callback returns `false` once settled, so generation is aborted
//   after a timeout instead of running on in the background.
// - The failure path can no longer reject a promise that already resolved
//   via the timeout.
function generateAnswer(question) {
  const stopSequence = "\n用户:";
  const prompt = `以下是一段对话。请以有帮助且准确的AI助手身份回答。回答完成后请停止。\n\n用户: ${question}\nAI:`;

  const params = {
    nThreads: 4,
    nTokPredict: 512,
    topK: 40,
    topP: 0.9,
    temp: 0.7,
    repeatPenalty: 1.1,
    prompt,
    stopSequence: stopSequence,
  };

  return new Promise((resolve, reject) => {
    let fullResponse = "";
    let isCompleted = false;
    const startTime = Date.now();
    const timeout = 30000; // 30-second generation budget

    // Resolve with a fallback message if the model takes too long.
    const timeoutId = setTimeout(() => {
      if (!isCompleted) {
        console.error("生成超时");
        cleanup();
        resolve("回答生成超时，请重试");
      }
    }, timeout);

    // Mark the promise as settled and disarm the timeout.
    const cleanup = () => {
      clearTimeout(timeoutId);
      isCompleted = true;
    };

    llama
      .createCompletion(params, (response) => {
        // Already settled (stop sequence or timeout): abort further generation.
        if (isCompleted) return false;

        fullResponse += response.token;

        // Cut the response at the stop sequence and settle early.
        if (fullResponse.includes(stopSequence)) {
          const stopIndex = fullResponse.lastIndexOf(stopSequence);
          fullResponse = fullResponse.substring(0, stopIndex);
          cleanup();
          resolve(fullResponse.trim());
          return false; // stop generation
        }
      })
      .then(() => {
        // Natural end of generation (no stop sequence, no timeout).
        if (!isCompleted) {
          const endTime = Date.now();
          console.log(
            `⏱️ 响应时间: ${((endTime - startTime) / 1000).toFixed(2)}秒`
          );
          cleanup();
          resolve(fullResponse.trim());
        }
      })
      .catch((error) => {
        console.error("\n❌ 生成回答时出错:", error);
        if (!isCompleted) {
          cleanup();
          reject(new Error("抱歉，生成回答时出错", { cause: error }));
        }
      });
  });
}

// Interactive REPL: read questions from stdin, print model answers to stdout.
// Loops until the user types 'exit' (whitespace-insensitive) or hits Ctrl+C.
async function interactiveMode() {
  const readline = await import("readline/promises");
  const rl = readline.default.createInterface({
    input: process.stdin,
    output: process.stdout,
  });

  console.log("\n💬 输入你的问题 (输入 'exit' 或按 Ctrl+C 退出)");

  while (true) {
    // Trim once up front so '  exit ' quits and whitespace-only input is skipped.
    const question = (await rl.question("\n👤 You: ")).trim();

    if (question.toLowerCase() === "exit") break;

    if (question.length === 0) {
      console.log("⚠️  Please enter a valid question.");
      continue;
    }

    // Previous version re-checked `if (question)` here — always true after the
    // empty-input guard above, so the redundant branch is removed.
    const answer = await generateAnswer(question);
    console.log(answer);
  }

  rl.close();
  console.log("\n👋 Goodbye!");
}

// Entry point: load the model, then hand control to the interactive loop.
async function main() {
  // Bail out early if the model cannot be loaded.
  if (!(await loadModel())) {
    return;
  }
  await interactiveMode();
  process.exit(0);
}

main().catch(console.error);
