import os
import pickle
import shutil
import chromadb
import sqlite3
from datetime import datetime
from multiprocessing import Lock
from multiprocessing.managers import BaseManager

from llama_index.core import (
    Settings,
    SimpleDirectoryReader,
    VectorStoreIndex,
    StorageContext,
    load_index_from_storage)

from llama_index.embeddings.ollama import OllamaEmbedding
from llama_index.vector_stores.chroma import ChromaVectorStore 
from llama_index.llms.ollama import Ollama
from llama_index.core.node_parser import SentenceSplitter

# LLM configuration:
# deepseek-r1:8b as the chat model, bge-m3:latest as the embedding model
Settings.llm = Ollama(model="deepseek-r1:8b")
# Settings.llm = Ollama(model="deepseek-r1:1.5b")
Settings.embed_model = OllamaEmbedding(model_name="bge-m3:latest")

# Module-level state and configuration
index = None          # global vector index instance (created lazily by initialize_index)
stored_docs = {}      # cache of stored document content snippets
lock = Lock()         # process lock guarding index mutation across workers
index_name = "./saved_index"  # directory where the index is persisted
pkl_name = "stored_documents.pkl"  # pickle file caching stored_docs
SERVER_PORT = 5602    # RPC server port
CHROMA_HOST = "localhost"  # ChromaDB service host
CHROMA_PORT = 8000    # ChromaDB service port
UPLOAD_DIR = "./uploaded_docs"  # directory where uploaded documents are stored
DB_NAME = "doc_metadata.db"  # SQLite metadata database file
def initialize_index():
    """Initialize the global index and document cache.

    Steps:
    1. Create the upload directory.
    2. Initialize the metadata database (SQLite).
    3. Connect to the ChromaDB vector store.
    4. Load the persisted index, or create and persist a new one.
    5. Load the cached document snippets from disk.

    Errors are logged and swallowed; on failure ``index`` may remain ``None``.
    """
    global index, stored_docs

    try:
        # Ensure the upload directory exists
        os.makedirs(UPLOAD_DIR, exist_ok=True)

        # Create the metadata table on first run
        with sqlite3.connect(DB_NAME) as conn:
            conn.execute('''CREATE TABLE IF NOT EXISTS documents
                         (doc_id TEXT PRIMARY KEY,
                          name TEXT NOT NULL,
                          path TEXT NOT NULL,
                          upload_time TIMESTAMP,
                          size INTEGER)''')

        # Connect to the ChromaDB server and wrap its collection as a vector store
        chroma = chromadb.HttpClient(host=CHROMA_HOST, port=CHROMA_PORT)
        collection = chroma.get_or_create_collection(name="chat_docs_collection")
        vector_store = ChromaVectorStore(chroma_collection=collection)

        with lock:
            if os.path.exists(index_name):
                # Reload the previously persisted index
                storage_context = StorageContext.from_defaults(
                    persist_dir=index_name, vector_store=vector_store)
                index = load_index_from_storage(storage_context=storage_context)
            else:
                # First run: create an empty index and persist it
                storage_context = StorageContext.from_defaults(vector_store=vector_store)
                index = VectorStoreIndex([], storage_context=storage_context)
                index.storage_context.persist(persist_dir=index_name)

            # Bug fix: the docstring promised loading the document cache, but
            # the original never did. Restore stored_docs from the pickle file
            # if one exists (trusted local file written by this process).
            if os.path.exists(pkl_name):
                try:
                    with open(pkl_name, "rb") as f:
                        stored_docs = pickle.load(f)
                except (pickle.UnpicklingError, EOFError, OSError) as cache_err:
                    print(f"Warning: could not load document cache: {cache_err}")
    except Exception as e:
        print(f"Error initializing index: {e}")
def query_index(query_text):
    """Run a RAG query against the global index.

    Args:
        query_text: Natural-language query string.

    Returns:
        The query engine response object, or a user-facing message string
        when nothing was found or an error occurred.
    """
    global index
    try:
        if index is None:
            initialize_index()
        # Bug fix: initialize_index swallows its own errors, so index may
        # still be None here — fail with a clear message instead of an
        # AttributeError on index.as_query_engine.
        if index is None:
            return "查询过程中发生错误: 索引未初始化"

        query_engine = index.as_query_engine(
            similarity_top_k=3,
            llm=Settings.llm,
            response_mode="compact"
        )
        response = query_engine.query(query_text)

        # Friendly fallback when retrieval produced no relevant nodes
        if not response or len(response.source_nodes) == 0:
            return "未找到相关文档内容，请尝试其他查询"

        return response
    except Exception as e:
        # Bug fix: removed the unreachable `return None` that followed this return
        print(f"查询失败: {str(e)}")
        return f"查询过程中发生错误: {str(e)}"
def insert_into_index(doc_file_path):
    """Load a document file, split it into chunks, and add it to the index.

    Args:
        doc_file_path: Path to the document file to ingest.

    Returns:
        True on success, False if any step failed.
    """
    global index
    try:
        if index is None:
            initialize_index()

        # Read the file into a single Document object
        doc = SimpleDirectoryReader(input_files=[doc_file_path]).load_data()[0]

        # Chunk the document into sentence-sized nodes.
        # NOTE(review): `separator` looks like a regex pattern, but
        # SentenceSplitter treats it as a literal string — confirm it
        # splits Chinese sentences as intended.
        sentence_splitter = SentenceSplitter(
            chunk_size=300,
            chunk_overlap=50,
            separator=r'(。|！|？)',
            paragraph_separator="\n\n"
        )
        chunk_nodes = sentence_splitter.get_nodes_from_documents(
            [doc], show_progress=False)

        # Tag every node with its source document id so it can be
        # filtered/deleted later
        for chunk in chunk_nodes:
            chunk.metadata["doc_id"] = doc.doc_id

        # Insert into the vector store and persist, guarded by the process lock
        with lock:
            index.insert_nodes(chunk_nodes)
            index.storage_context.persist(persist_dir=index_name)

        return True
    except Exception as e:
        print(f"文档处理失败: {e}")
        return False
def get_documents_list():
    """Return metadata for every uploaded document as a list of dicts."""
    if index is None:  # index doubles as the "initialized" flag
        initialize_index()

    columns = ("id", "name", "path", "upload_time", "size")
    with sqlite3.connect(DB_NAME) as conn:
        rows = conn.execute(
            "SELECT doc_id, name, path, upload_time, size FROM documents"
        ).fetchall()
    return [dict(zip(columns, row)) for row in rows]
def delete_from_index(doc_id: str):
    """Delete a document: its vectors, its file on disk, and its DB record.

    Args:
        doc_id: Identifier of the document to delete.

    Returns:
        True if the document record was found and fully processed,
        False if the record did not exist or an error occurred.
    """
    print(f"Deleting document: {doc_id}")
    global index
    try:
        if index is None:
            initialize_index()

        with lock:
            with sqlite3.connect(DB_NAME) as conn:
                # Look up the stored file path before removing the record
                cur = conn.execute("SELECT path FROM documents WHERE doc_id = ?", (doc_id,))
                if (row := cur.fetchone()):
                    file_path = row[0]
                    print(f"Deleting file: {file_path}")
                    if os.path.exists(file_path):
                        os.remove(file_path)

                # Delete the metadata row; 0 affected rows means unknown doc_id
                cur = conn.execute("DELETE FROM documents WHERE doc_id = ?", (doc_id,))
                if cur.rowcount == 0:
                    return False

            # Bug fix: the vector-store cleanup was commented out, so deleted
            # documents kept answering queries. Remove every node tagged with
            # this doc_id (set at insert time) from the Chroma collection.
            # Best-effort: file and DB record are already gone, so log and
            # continue if the vector store is unavailable.
            try:
                vector_store = index.storage_context.vector_store
                vector_store.chroma_collection.delete(where={"doc_id": doc_id})
            except Exception as ve:
                print(f"向量数据删除失败: {ve}")

            # Persist the index change
            index.storage_context.persist(persist_dir=index_name)

        return True
    except Exception as e:
        print(f"删除失败: {e}")
        return False

if __name__ == "__main__":
    print("initializing index...")
    initialize_index()

    print(f'Create server on port {SERVER_PORT}...')
    manager = BaseManager(('', SERVER_PORT), b'password')
    print("registering functions...")
    manager.register('query_index', query_index)
    manager.register('insert_into_index', insert_into_index)
    manager.register('get_documents_list', get_documents_list)
    manager.register('delete_from_index', delete_from_index) # 注册新函数
    server = manager.get_server()
    print("index server started...")
    server.serve_forever()

    