from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from llama_index.core.indices import EmptyIndex
from llama_index.core.memory import ChatMemoryBuffer

# Registry mapping session_id -> chat_engine instance, so each
# conversation keeps its own engine (and therefore its own memory).
session_chat_engines = {}

index_vector_store = EmptyIndex()  # module-level placeholder index; presumably replaced with a real VectorStoreIndex elsewhere — TODO confirm

def get_chat_engine_for_session(session_id):
    """Return the chat engine bound to *session_id*, building one lazily.

    Each session gets its own engine backed by a fresh, empty
    ``ChatMemoryBuffer`` so conversation histories never mix.
    The engine is cached in ``session_chat_engines`` and reused on
    subsequent calls for the same session.
    """
    engine = session_chat_engines.get(session_id)
    if engine is None:
        # First request for this session: give it a dedicated memory
        # buffer and a chat engine built over the shared index.
        fresh_memory = ChatMemoryBuffer.from_defaults(chat_history=[])
        engine = index_vector_store.as_chat_engine(
            memory=fresh_memory,
            system_prompt="你是法律智能助手",
        )
        session_chat_engines[session_id] = engine
    return engine
