import datetime
import os
import sqlite3
from typing import Any, Dict

from langchain_community.vectorstores import FAISS
from langchain_core.documents import Document
from langchain_openai import OpenAIEmbeddings


class RAGManager:
    """Hybrid retrieval store backed by a FAISS vector index plus a SQLite
    key/value table.

    Every entry is stored twice: as a row in the ``kv`` SQLite table (exact
    lookup) and as a FAISS document whose ``page_content`` is the key
    (semantic lookup). Both live under ``cfg['rag_path']``.
    """

    def __init__(self, cfg: Dict[str, Any]):
        """Open (or bootstrap) the FAISS index and the SQLite table.

        Args:
            cfg: Configuration mapping. Reads ``api_key``, ``base_url``,
                ``rag_top_k`` (optional, defaults to 3 at search time) and
                ``rag_path`` (required — persistence directory).

        Raises:
            ValueError: If ``cfg['rag_path']`` is missing or empty. (The
                original code would instead crash later with an opaque
                ``TypeError`` inside ``save_local``/``os.makedirs``.)
        """
        embedding = OpenAIEmbeddings(
            api_key=cfg.get('api_key'),
            base_url=cfg.get('base_url')
        )

        self.top_k = cfg.get('rag_top_k')
        self.rag_path = cfg.get('rag_path')
        if not self.rag_path:
            raise ValueError("cfg['rag_path'] is required: RAGManager needs a persistence directory")

        if os.path.exists(self.rag_path):
            # Existing index on disk — reload it. The pickle inside the FAISS
            # folder is trusted local state, hence the deserialization opt-in.
            self.index = FAISS.load_local(self.rag_path, embedding, allow_dangerous_deserialization=True)
        else:
            # FAISS cannot create an empty index directly: seed it with one
            # throwaway document, then delete that document, leaving an empty
            # but fully initialized index.
            dummy_doc = Document(page_content="init", metadata={})
            vectorstore = FAISS.from_documents([dummy_doc], embedding=embedding)
            doc_ids = list(vectorstore.docstore._dict.keys())
            vectorstore.delete(doc_ids)
            vectorstore.save_local(self.rag_path)  # also creates the directory
            self.index = vectorstore

        # No-op when save_local already created the directory; kept for the
        # load branch where the directory is guaranteed to exist anyway.
        os.makedirs(self.rag_path, exist_ok=True)
        self.sqlite_path = os.path.join(self.rag_path, "kv.db")
        conn = sqlite3.connect(self.sqlite_path)
        try:
            conn.execute('CREATE TABLE IF NOT EXISTS kv (key TEXT PRIMARY KEY, value TEXT, type TEXT)')
            conn.commit()
        finally:
            # Always release the connection, even if table creation fails.
            conn.close()

    def add(self, key, value, type):
        """Upsert one entry into both the SQLite table and the FAISS index.

        Any FAISS document whose ``page_content`` equals ``key`` is removed
        first, so re-adding a key replaces rather than duplicates it. The
        index is persisted to disk after every call.

        Args:
            key: Lookup text (embedded for semantic search).
            value: Payload stored alongside the key.
            type: Category tag (e.g. "static", "dynamic", "events").
                Note: shadows the builtin, but kept for caller compatibility.
        """
        conn = sqlite3.connect(self.sqlite_path)
        try:
            conn.execute('REPLACE INTO kv (key, value, type) VALUES (?, ?, ?)', (key, value, type))
            conn.commit()
        finally:
            conn.close()

        # Linear scan of the internal docstore to evict any stale copy of
        # this key before re-inserting it. _dict is a private langchain
        # attribute; there is no public "find by content" API.
        docstore = self.index.docstore._dict
        ids_to_delete = [doc_id for doc_id, doc in docstore.items() if doc.page_content == key]
        if ids_to_delete:
            self.index.delete(ids_to_delete)

        doc = Document(
            page_content=key,
            metadata={
                "value": value,
                "type": type
            }
        )
        self.index.add_documents([doc])
        if self.rag_path:
            self.index.save_local(self.rag_path)

    def add_bidirectional(self, key, value, type):
        """Store the pair in both directions so either side is searchable."""
        self.add(key, value, type)
        self.add(value, key, type)

    def search(self, query):
        """Return the top-k semantically similar entries as newline-joined
        ``[type] key: value`` lines (empty string when nothing matches).
        """
        results = self.index.similarity_search(query, k=self.top_k or 3)
        lines = []
        for doc in results:
            value = doc.metadata.get("value", "")
            typ = doc.metadata.get("type", "")
            key = doc.page_content
            lines.append(f"[{typ}] {key}: {value}")
        return "\n".join(lines)

    def store(self, data: dict):
        """Ingest a structured extraction payload into the store.

        Expected keys in ``data`` (all optional): ``static``/``dynamic``/
        ``ideas`` dicts, an ``events`` list of dicts, and a ``solutions``
        list of dicts — schema assumed from usage; TODO confirm against the
        producer of this payload.
        """
        for k, v in data.get("static", {}).items():
            self.add_bidirectional(k, v, type="static")

        for k, v in data.get("dynamic", {}).items():
            self.add(k, v, type="dynamic")

        for k, v in data.get("ideas", {}).items():
            self.add(k, v, type="ideas")

        # Events are timestamped with ingestion time (naive local clock).
        now_str = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        for e in data.get("events", []):
            key = f"{now_str} {e.get('time', '')} {e.get('location', '')} {e.get('people', '')}".strip()
            value = e.get("description", "")
            self.add_bidirectional(key, value, type="events")

        # "iswork" may arrive as a bool or as a string from upstream JSON.
        for s in data.get("solutions", []):
            if s.get("iswork", False) in [True, "true", "True"]:
                self.add(s["problem"], s["solution"], type="solution")
