import csv
import json
import re
from pathlib import Path
from typing import Dict, List, Optional, Union

from llama_index.core import Document, KnowledgeGraphIndex, StorageContext
from llama_index.core.graph_stores import SimpleGraphStore
from llama_index.core.node_parser import SimpleNodeParser

from config.llm import deepseek_llm  # custom LLM wrapper (replaces the default)

LLM = deepseek_llm()  # ✅ 使用本地模型或自定义模型

TRIPLE_PATTERN = re.compile(r"(.+?)[\\s,，]*(是|属于|提出|拥有|包括)[\\s,，]*(.+)")

def _extract_triples_from_line(line: str) -> List[Dict[str, str]]:
    m = TRIPLE_PATTERN.search(line)
    if m:
        subj, rel, obj = m.groups()
        return [{"source": subj.strip(), "relation": rel.strip(), "target": obj.strip()}]
    return []

def build_kg_from_file(file_path: Union[str, Path]) -> List[Dict[str, str]]:
    file_path = Path(file_path)
    triples = []
    if file_path.suffix.lower() in {".txt", ".md"}:
        lines = file_path.read_text(encoding="utf-8", errors="ignore").splitlines()
        for line in lines:
            triples.extend(_extract_triples_from_line(line.strip()))
    elif file_path.suffix.lower() == ".csv":
        with file_path.open("r", encoding="utf-8", errors="ignore") as f:
            for row in csv.reader(f):
                for cell in row:
                    triples.extend(_extract_triples_from_line(cell))
    elif file_path.suffix.lower() == ".json":
        data = json.loads(file_path.read_text(encoding="utf-8", errors="ignore"))
        if isinstance(data, list):
            for item in data:
                text = item if isinstance(item, str) else item.get("text", "")
                triples.extend(_extract_triples_from_line(text))
    if not triples:
        triples.append({"source": file_path.stem, "relation": "包含", "target": "内容"})
    return triples

def _build_lindex(triples: List[Dict[str, str]]):
    sentences = [f"{t['source']} {t['relation']} {t['target']}" for t in triples]
    docs = [{"text": s} for s in sentences]
    nodes = SimpleNodeParser().get_nodes_from_documents(docs)
    graph_store = SimpleGraphStore()
    storage = StorageContext.from_defaults(graph_store=graph_store)
    return KnowledgeGraphIndex(nodes=nodes, storage_context=storage, llm=LLM)

def ask_question_with_kg(question: str, triples: List[Dict[str, str]] = None) -> str:
    if not triples:
        return LLM.complete(f"回答问题：{question}")
    kg_index = _build_lindex(triples)
    engine = kg_index.as_query_engine(llm=LLM, similarity_top_k=3)
    return str(engine.query(question))
