import os

from dotenv import load_dotenv

# Qwen (Tongyi Qianwen) LLM served through DashScope
from llama_index.llms.dashscope import DashScope

# VectorStoreIndex builds a vector index; SimpleDirectoryReader loads documents from a directory
from llama_index.core import VectorStoreIndex,SimpleDirectoryReader
# LlamaIndex global settings object
from llama_index.core import Settings
# Local (on-disk) HuggingFace embedding model loader
from llama_index.embeddings.huggingface import HuggingFaceEmbedding


load_dotenv()

# Qwen model identifier served through DashScope (Alibaba Bailian).
model = "qwen-plus-2025-01-25"
api_key = os.getenv("DASHSCOPE_API_KEY")
api_base_url = os.getenv("DASHSCOPE_BASE_URL")

# Fail fast with a clear message instead of an opaque auth error deep
# inside the first LLM call.
if not api_key:
    raise RuntimeError(
        "DASHSCOPE_API_KEY is not set; define it in the environment or a .env file"
    )

# Replace LlamaIndex's default LLM with the DashScope-backed Qwen model.
Settings.llm = DashScope(
    model_name=model,
    api_key=api_key,
    api_base_url=api_base_url,
    is_chat_model=True,
)

# Local HuggingFace embedding model. The path can be overridden via the
# EMBED_MODEL_PATH env var; the default keeps the original behavior.
embed_model_path = os.getenv(
    "EMBED_MODEL_PATH", r"D:\llm\Local_model\BAAI\bge-large-zh-v1___5"
)
Settings.embed_model = HuggingFaceEmbedding(model_name=embed_model_path)

# Load every file in ./data; the reader auto-selects a parser per file type.
documents = SimpleDirectoryReader("data").load_data()
# Build an in-memory vector index over the loaded documents.
index = VectorStoreIndex.from_documents(documents)
# Turn the index into a RAG query engine and ask one question.
query_engine = index.as_query_engine()
response = query_engine.query("企业事件？")
print(response)