from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from llama_index.core.memory import ChatMemoryBuffer

# Stores one query_engine instance per chat session (see session_query_engines below)
from init_config import Config

# Maps session_id -> cached query engine, so each session reuses one engine.
session_query_engines = {}

# Global vector index, injected later via set_vector_store(); None until then.
# NOTE: this previously held the VectorStoreIndex *class* as a placeholder,
# which is not usable — calling .as_query_engine() on the class would fail
# with a confusing TypeError. None makes "not initialised yet" explicit.
index_vector_store = None


def set_vector_store(index: VectorStoreIndex):
    """Inject the shared vector index used to build per-session query engines.

    Must be called once at startup, before get_query_engine_for_session().
    """
    global index_vector_store
    index_vector_store = index


def get_query_engine_for_session(session_id):
    """Return the query engine for *session_id*, creating and caching it on first use.

    Each session gets its own engine so sessions do not share retrieval state.

    Raises:
        RuntimeError: if the global vector index was never initialised via
            set_vector_store() — fail fast with a clear message instead of an
            obscure TypeError/AttributeError from the placeholder value.
    """
    if session_id not in session_query_engines:
        # Guard covers both possible "unset" placeholders: None, and the
        # VectorStoreIndex class itself (the module's original default).
        if index_vector_store is None or index_vector_store is VectorStoreIndex:
            raise RuntimeError(
                "Vector store index is not initialised; call set_vector_store() first."
            )
        # Create a query engine bound to this session.
        query_engine = index_vector_store.as_query_engine(
            similarity_top_k=Config.TOP_K,
            text_qa_template=Config.TEMPLATE,
            verbose=True,
        )
        session_query_engines[session_id] = query_engine

    return session_query_engines[session_id]
