import os
import shutil
import socket
import threading
import time
from datetime import datetime

import schedule
from fastapi import FastAPI, UploadFile, Form, Request, HTTPException
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from llama_index.core import (
    Settings,
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)
from llama_index.core.evaluation import FaithfulnessEvaluator
from llama_index.core.node_parser import SentenceSplitter
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.huggingface import HuggingFaceLLM
from pydantic import BaseModel

app = FastAPI()
app.mount("/static", StaticFiles(directory="static"), name="static")
templates = Jinja2Templates(directory="templates")

# Storage directory for uploaded documents; the vector index is built from it.
UPLOAD_DIR = "data"
os.makedirs(UPLOAD_DIR, exist_ok=True)

# Local HuggingFace embedding model.
# NOTE(review): model/tokenizer paths are hard-coded Windows paths — consider
# moving them to a config file or environment variables.
embed_model = HuggingFaceEmbedding(
    model_name=r"E:/modelscope_models/Ceceliachenen/paraphrase-multilingual-MiniLM-L12-v2"
    # model_name=r"E:/modelscope_models/iic/nlp_gte_sentence-embedding_chinese-large"
)

# Local HuggingFace chat LLM (trust_remote_code is required by Qwen models).
llm = HuggingFaceLLM(
    model_name=r"E:/modelscope_models\Qwen\Qwen2___5-0___5B-Instruct",
    # model_name=r"E:/modelscope_models/deepseek-ai/DeepSeek-R1-Distill-Qwen-1___5B",
    tokenizer_name=r"E:/modelscope_models\Qwen\Qwen2___5-0___5B-Instruct",
    # tokenizer_name=r"E:/modelscope_models/deepseek-ai/DeepSeek-R1-Distill-Qwen-1___5B",
    model_kwargs={"trust_remote_code": True},
    tokenizer_kwargs={"trust_remote_code": True}
)

# Register the HuggingFace models as llama_index's global defaults.
Settings.embed_model = embed_model
Settings.llm = llm

# Alternative: Ollama models (disabled).
# Settings.embed_model = OllamaEmbedding(model_name="llama2")
# Settings.llm = Ollama(model="llama2")

# Global chunking configuration: 512-char chunks with 100-char overlap.
text_splitter = SentenceSplitter(chunk_size=512, chunk_overlap=100)
Settings.text_splitter = text_splitter

# Evaluator scoring whether answers are faithful to the retrieved context
# (currently only used in commented-out code in /ask/).
evaluator = FaithfulnessEvaluator(llm=llm)


def initialize_index():
    """Load the persisted vector index, or (re)build it from UPLOAD_DIR.

    Returns:
        VectorStoreIndex: a ready-to-query index, persisted under vector_store/.

    Raises:
        HTTPException: 500 when both loading and rebuilding fail.
    """
    vector_store_dir = "vector_store"
    required_files = ["docstore.json", "vector_store.json"]

    def check_index_files():
        # All persistence artifacts must be present before attempting a load.
        return all(os.path.exists(os.path.join(vector_store_dir, f)) for f in required_files)

    try:
        if check_index_files():
            storage_context = StorageContext.from_defaults(persist_dir=vector_store_dir)
            # Secondary validation: treat an index with no documents as invalid.
            if len(storage_context.docstore.docs) > 0:
                # BUG FIX: VectorStoreIndex(storage_context=...) does not load a
                # persisted index (the constructor requires nodes/index_struct);
                # load_index_from_storage is the supported loading path.
                return load_index_from_storage(storage_context)
            print("⚠️ 检测到空索引，将重建...")

        # Rebuild path: empty index when there is no data, else index the files.
        if len(os.listdir(UPLOAD_DIR)) == 0:
            print("ℹ️ 数据目录为空，创建空索引")
            index = VectorStoreIndex([])
        else:
            print("🔄 从数据文件重建索引...")
            documents = SimpleDirectoryReader(UPLOAD_DIR).load_data()
            index = VectorStoreIndex.from_documents(documents, show_progress=True, transformations=[text_splitter])

        # Wipe any stale persisted state before writing the fresh index.
        shutil.rmtree(vector_store_dir, ignore_errors=True)
        os.makedirs(vector_store_dir, exist_ok=True)
        index.storage_context.persist(persist_dir=vector_store_dir)
        return index

    except Exception as e:
        print(f"❌ 索引初始化失败: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Index initialization failed: {str(e)}")


# Build (or load) the global index once at startup; endpoints mutate it in place.
index = initialize_index()


@app.post("/upload/")
async def upload_file(file: UploadFile):
    """Save one uploaded file under UPLOAD_DIR and add it to the index.

    Returns {"filename": <saved name>} on success. On indexing failure the
    saved file is removed again and an HTTPException(500) is raised.
    """
    # SECURITY FIX: strip any directory components from the client-supplied
    # name so "../evil" cannot escape UPLOAD_DIR.
    filename = os.path.basename(file.filename or "")
    if not filename:
        raise HTTPException(status_code=400, detail="Invalid filename")

    file_path = os.path.join(UPLOAD_DIR, filename)
    with open(file_path, "wb") as buffer:
        shutil.copyfileobj(file.file, buffer)

    # Incremental index update
    global index
    try:
        # Load only the newly uploaded document.
        new_docs = SimpleDirectoryReader(input_files=[file_path]).load_data()
        # Split it into nodes with the globally configured parser.
        new_nodes = Settings.node_parser.get_nodes_from_documents(new_docs)
        # Insert the new nodes into the live index.
        index.insert_nodes(new_nodes)
        # BUG FIX: persist() without persist_dir writes to the default
        # "./storage" directory, not the "vector_store" directory that
        # initialize_index() loads from — uploads were never re-loadable.
        index.storage_context.persist(persist_dir="vector_store")
    except Exception as e:
        # Roll back the saved file so data/ and the index stay consistent.
        os.remove(file_path)
        raise HTTPException(status_code=500, detail=f"文件处理失败: {str(e)}")

    return {"filename": filename}


@app.get("/check_index")
async def check_index():
    """Health probe: report the document count and the index's absolute path."""
    try:
        return {
            "status": "healthy",
            "document_count": len(index.docstore.docs),
            "index_path": os.path.abspath("vector_store"),
        }
    except Exception as e:
        return {"status": "error", "detail": str(e)}


# @app.post("/ask/")
# async def ask_question(question: str = Form(...)):
#     query_engine = index.as_query_engine()
#     response = query_engine.query(question)
#     return {"answer": str(response)}


@app.post("/ask/")
async def chat(question: str = Form(...)):
    """Answer a question against the vector index and log the source documents.

    Returns {"answer": <LLM response text>}.
    """
    query_engine = index.as_query_engine(
        similarity_top_k=3,  # retrieve the top-3 most similar chunks
        response_mode="tree_summarize",  # this mode keeps source nodes on the response
    )
    response = query_engine.query(question)

    # Log the answer.
    print(f"答案：{response}")

    # Log the retrieved source documents for traceability.
    print("\n==== 参考文档 ====")
    for node in response.source_nodes:
        print(f"文档内容：{node.text[:200]}...")  # first 200 chars only
        # BUG FIX: metadata may lack 'file_path' (KeyError) and node.score may
        # be None (TypeError in the :.4f format); guard both so a logging
        # detail cannot crash the endpoint.
        print(f"来源文件：{node.metadata.get('file_path', '未知')}")
        score = node.score if node.score is not None else 0.0
        print(f"相似度得分：{score:.4f}\n{'-' * 40}")

    # eval_result = evaluator.evaluate_response(query=question, response=response)
    # print(str(eval_result.passing))

    return {"answer": str(response)}


# Periodic index maintenance task: re-sync the index with the files on disk.
def index_maintenance():
    """Refresh the index from UPLOAD_DIR's documents and persist any changes.

    Exceptions are caught and logged so a failure cannot kill the scheduler
    thread that invokes this job.
    """
    try:
        if not os.listdir(UPLOAD_DIR):
            return  # nothing to sync
        documents = SimpleDirectoryReader(UPLOAD_DIR).load_data()
        # BUG FIX: index.refresh() requires the documents to diff against —
        # calling it with no arguments raises TypeError on every run.
        # refresh_ref_docs returns one bool per document (True if updated).
        if any(index.refresh_ref_docs(documents)):
            # Persist to the same directory initialize_index() loads from.
            index.storage_context.persist(persist_dir="vector_store")
            print("🛠 索引维护完成")
    except Exception as e:
        print(f"⚠️ 索引维护失败: {e}")


# Run the maintenance job once every hour.
schedule.every(1).hours.do(index_maintenance)


# Background scheduler loop: polls for due jobs once per second.
def run_scheduler():
    while True:
        schedule.run_pending()
        time.sleep(1)


# Daemon thread so it never blocks interpreter shutdown.
threading.Thread(target=run_scheduler, daemon=True).start()


class ChatHistoryItem(BaseModel):
    """One question/answer exchange exchanged via the /history/ endpoints."""

    question: str
    answer: str
    timestamp: str  # presumably an ISO-8601 string (see get_history) — TODO confirm


@app.post("/history/")
async def save_history(item: ChatHistoryItem):
    """Accept one chat-history entry (persistence backend not implemented yet)."""
    # Placeholder: a database write would go here.
    return {"status": "success"}


@app.get("/history/")
async def get_history():
    """Return the chat history (currently a hard-coded sample, not DB-backed)."""
    sample = {
        "question": "示例问题",
        "answer": "示例回答",
        "timestamp": datetime.now().isoformat(),
    }
    return [sample]


# File deletion endpoint.
@app.delete("/file/{filename}")
async def delete_file(filename: str):
    """Delete an uploaded file and rebuild the index from the remaining files.

    Returns {"status": "success"} or {"status": "file not found"}.
    """
    # BUG FIX: the route must declare the {filename} path parameter; without it
    # FastAPI binds `filename` as a query parameter instead.
    # SECURITY FIX: strip directory components so "../x" cannot delete files
    # outside UPLOAD_DIR.
    safe_name = os.path.basename(filename)
    file_path = os.path.join(UPLOAD_DIR, safe_name)
    if os.path.exists(file_path):
        os.remove(file_path)
        # Full rebuild keeps the persisted index consistent with data/.
        global index
        index = initialize_index()
        return {"status": "success"}
    return {"status": "file not found"}


@app.get("/")
async def main(request: Request):
    """Render the home page, listing the currently uploaded files."""
    uploaded = os.listdir(UPLOAD_DIR)
    context = {"request": request, "files": uploaded}
    return templates.TemplateResponse("index.html", context)


def get_ip():
    """Return this machine's IPv4 address, resolved from its own hostname."""
    return socket.gethostbyname(socket.gethostname())


if __name__ == "__main__":
    import uvicorn

    # Bind to the machine's LAN IP so the service is reachable from other hosts.
    uvicorn.run(app, host=get_ip(), port=8000)
