from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings
from llama_index.core.query_engine import SubQuestionQueryEngine
from llama_index.core.tools import QueryEngineTool
from llms import deepseek_llm
from embeddings import embed_model_local_bge_small
# Configure the global LLM and embedding model; all indexes and query
# engines created below pick these up from Settings.
Settings.llm = deepseek_llm()
Settings.embed_model = embed_model_local_bge_small()

# Load every document under data/ and build one in-memory vector index.
# NOTE(review): presumably data/ holds one document per city
# (changchun/hangzhou/lanzhou/shenzhen) — confirm against the repo.
cities_data = SimpleDirectoryReader(input_dir="data").load_data()
vector_index = VectorStoreIndex.from_documents(cities_data, show_progress=True)

# Expose the index's query engine as a tool so the sub-question engine can
# route generated sub-questions to it. The name and description are what
# the LLM sees when deciding which tool answers each sub-question.
vector_query_engine_tool = QueryEngineTool.from_defaults(
    query_engine=vector_index.as_query_engine(),
    name="city_info",
    description="用于回答城市的相关信息",  # "answers questions about cities"
)

# SubQuestionQueryEngine decomposes a compound question into sub-questions,
# answers each via the tool above, then synthesizes a final response.
s_engine = SubQuestionQueryEngine.from_defaults(
    query_engine_tools=[vector_query_engine_tool]
)

# Compound query: "introduce the history of Hangzhou and Shenzhen separately".
response = s_engine.query("分别介绍一下杭州和深圳的历史")
print(response)
