import express from "express";
import fs from "fs";
import fetch from "node-fetch";
import Database from "better-sqlite3";
import "sqlite-vss";


// Express app; express.json() parses JSON request bodies for /rag-chat.
const app = express();
app.use(express.json());

// ========== Initialize SQLite ==========
// Opens (or creates) library.db in the current working directory.
const db = new Database("library.db");


// Create the books table. One row per book; `embedding` stores the book's
// text-embedding vector as raw Float32Array bytes in a BLOB.
// (The one-token-per-line layout below is only whitespace — SQLite parses
// it identically to a single-line statement.)
db.exec(`
    CREATE TABLE IF NOT EXISTS books
    (
        id
        INTEGER
        PRIMARY
        KEY
        AUTOINCREMENT,
        title
        TEXT,
        author
        TEXT,
        category
        TEXT,
        summary
        TEXT,
        embedding
        BLOB
    );
`);

// ======= Embedding helper =======
/**
 * Generate an embedding vector for `text` via the local Ollama server.
 *
 * @param {string} text - Text to embed.
 * @returns {Promise<number[]>} The embedding vector.
 * @throws {Error} If the HTTP request fails or the response lacks an
 *   `embedding` array (the original silently returned `undefined` in
 *   these cases, crashing callers later with confusing errors).
 */
async function embed(text) {
    const res = await fetch("http://127.0.0.1:11434/api/embeddings", {
        method: "POST",
        headers: {"Content-Type": "application/json"},
        body: JSON.stringify({
            model: "nomic-embed-text", // embedding model that ships with Ollama
            prompt: text
        })
    });
    // Fail loudly on HTTP-level errors instead of parsing an error body.
    if (!res.ok) {
        throw new Error(`Ollama embeddings request failed: ${res.status} ${res.statusText}`);
    }
    const data = await res.json();
    if (!Array.isArray(data.embedding)) {
        throw new Error("Ollama embeddings response missing 'embedding' array");
    }
    return data.embedding;
}

// ======= Seed the database with book embeddings =======
/**
 * Load books from books.json, embed each one, and insert it into SQLite.
 *
 * Bug fix: the original ran the inserts unconditionally on every startup,
 * so each server restart duplicated the entire catalogue. We now skip
 * seeding when the table already contains rows.
 *
 * @returns {Promise<void>}
 */
async function initBooks() {
    const {n} = db.prepare("SELECT COUNT(*) AS n FROM books").get();
    if (n > 0) {
        console.log(`ℹ️ Books table already has ${n} rows; skipping init`);
        return;
    }

    const books = JSON.parse(fs.readFileSync("books.json", "utf-8"));
    const stmt = db.prepare(`
        INSERT INTO books (title, author, category, summary, embedding)
        VALUES (?, ?, ?, ?, ?)
    `);

    // Sequential awaits: the local embedding server is the bottleneck and
    // this keeps its load predictable.
    for (const b of books) {
        // Embed one string combining all searchable fields.
        const text = `${b.title} ${b.author} ${b.category} ${b.summary}`;
        const emb = await embed(text);
        // Store the vector as raw Float32 bytes in the BLOB column.
        stmt.run(b.title, b.author, b.category, b.summary, Buffer.from(new Float32Array(emb).buffer));
    }
    console.log("✅ Books initialized with embeddings");
}

// ======= Retrieval =======
/**
 * Rank all stored books by cosine similarity to a query embedding and
 * return the top matches.
 *
 * Scans every row and scores it in JS — fine for a small library; a real
 * vector index (e.g. sqlite-vss) would be needed at scale.
 *
 * @param {number[]|Float32Array} questionEmb - Query embedding.
 * @param {number} [topK=3] - Number of results to return.
 * @returns {Array<object>} Top-K book rows, each with an added `score`.
 */
function searchBooks(questionEmb, topK = 3) {
    const rows = db.prepare(`
        SELECT id,
               title,
               author,
               category,
               summary,
               embedding
        FROM books
    `).all();

    // Cosine similarity. Fixes the original's division by zero: a
    // zero-magnitude vector now scores 0 instead of NaN (NaN scores made
    // the sort order meaningless). Also clamps to the shorter length so a
    // dimension mismatch cannot read past the end of either vector.
    function cosineSim(vec1, vec2) {
        const len = Math.min(vec1.length, vec2.length);
        let dot = 0, normA = 0, normB = 0;
        for (let i = 0; i < len; i++) {
            dot += vec1[i] * vec2[i];
            normA += vec1[i] ** 2;
            normB += vec2[i] ** 2;
        }
        const denom = Math.sqrt(normA) * Math.sqrt(normB);
        return denom === 0 ? 0 : dot / denom;
    }

    // Reinterpret each stored BLOB as a Float32Array view (no copy).
    const results = rows.map(r => {
        const emb = new Float32Array(r.embedding.buffer, r.embedding.byteOffset, r.embedding.byteLength / 4);
        return {...r, score: cosineSim(questionEmb, emb)};
    });

    return results.sort((a, b) => b.score - a.score).slice(0, topK);
}


// Minimal CORS middleware so browser clients on other origins can call us.
app.use((req, res, next) => {
    res.setHeader("Access-Control-Allow-Origin", "*");
    res.setHeader("Access-Control-Allow-Methods", "GET,POST,OPTIONS");
    res.setHeader("Access-Control-Allow-Headers", "Content-Type");
    // Answer CORS preflight requests directly with 204 rather than letting
    // OPTIONS fall through to the router (which defines no OPTIONS handler).
    if (req.method === "OPTIONS") {
        return res.sendStatus(204);
    }
    next();
});

// ======= RAG Chat =======
// POST /rag-chat  body: { question: string, stream?: boolean }
// Embeds the question, retrieves the most similar books, builds a grounded
// prompt, and forwards it to the local Ollama chat model (optionally
// streaming the reply through to the client).
app.post("/rag-chat", async (req, res) => {
    const {question, stream} = req.body ?? {};

    // Validate input before spending any model calls on it.
    if (typeof question !== "string" || question.trim() === "") {
        return res.status(400).json({error: "Field 'question' (non-empty string) is required"});
    }

    try {
        // 1. Embed the user's question.
        const qEmb = await embed(question);

        // 2. Retrieve the most relevant books.
        const relatedBooks = searchBooks(qEmb);

        const context = relatedBooks.map(b =>
            `《${b.title}》 by ${b.author} [${b.category}] - ${b.summary}`
        ).join("\n");

        // 3. Build the grounded prompt (Chinese template kept verbatim).
        const prompt = `
你是一个图书馆助理。
用户的问题: ${question}
以下是图书馆里找到的相关书籍信息：
${context || "没有找到相关书籍。"}
请基于这些书籍信息回答用户。
  `;

        // 4. Call Ollama's chat API.
        const response = await fetch("http://127.0.0.1:11434/api/chat", {
            method: "POST",
            headers: {"Content-Type": "application/json"},
            body: JSON.stringify({
                model: "llama3.1",
                messages: [{role: "user", content: prompt}],
                stream: stream || false
            })
        });

        // Surface upstream failures as a 502 instead of relaying garbage.
        if (!response.ok) {
            return res.status(502).json({error: `Ollama chat request failed: ${response.status}`});
        }

        if (stream) {
            // Forward Ollama's streamed body to the client chunk by chunk.
            res.setHeader("Content-Type", "text/event-stream");
            res.setHeader("Cache-Control", "no-cache");
            response.body.on("data", chunk => {
                res.write(chunk);
            });
            response.body.on("end", () => {
                res.end();
            });
            response.body.on("error", err => {
                console.error(err);
                res.end();
            });
        } else {
            const data = await response.json();
            res.json(data);
        }
    } catch (err) {
        // Without this, a rejection in an async Express handler becomes an
        // unhandled promise rejection and crashes the process on modern Node.
        console.error("rag-chat failed:", err);
        if (!res.headersSent) {
            res.status(500).json({error: "Internal error while answering question"});
        } else {
            res.end();
        }
    }
});

// ===== Start the server =====
app.listen(3000, async () => {
    console.log("🚀 RAG server running on http://127.0.0.1:3000");
    try {
        await initBooks();
    } catch (err) {
        // The original left this rejection unhandled — on modern Node that
        // kills the process with no useful context. Log and keep serving;
        // /rag-chat still works against whatever data already exists.
        console.error("Failed to initialize books:", err);
    }
});
