import os

from langchain_community.chat_models import ChatTongyi
from llama_index.core import Settings, SimpleDirectoryReader, StorageContext, VectorStoreIndex
from llama_index.embeddings.dashscope import (
    DashScopeEmbedding,
    DashScopeTextEmbeddingModels,
    DashScopeTextEmbeddingType
)
from llama_index.llms.langchain import LangChainLLM
from llama_index.vector_stores.milvus import MilvusVectorStore
from pymilvus import MilvusClient

# RAG demo: embed local documents with DashScope, store vectors in Milvus,
# and answer a query with the Tongyi (qwen-plus) LLM via LlamaIndex.
#
# SECURITY: the API key must come from the environment, never from source.
# (The key previously hard-coded here is considered leaked and must be rotated.)
import os

api_key = os.environ.get("DASHSCOPE_API_KEY")
if not api_key:
    raise RuntimeError(
        "DASHSCOPE_API_KEY environment variable is not set; "
        "export your DashScope API key before running."
    )

# Embedding model (dashscope text-embedding-v3, 1024-dim output).
Settings.embed_model = DashScopeEmbedding(
    model_name=DashScopeTextEmbeddingModels.TEXT_EMBEDDING_V3,
    text_type=DashScopeTextEmbeddingType.TEXT_TYPE_DOCUMENT,
    api_key=api_key,
)

# LLM used for answer synthesis, wrapped so LlamaIndex can drive a LangChain model.
Settings.llm = LangChainLLM(
    ChatTongyi(model="qwen-plus", api_key=api_key)
)

# Load every readable file from the source directory into Document objects.
documents = SimpleDirectoryReader(r"D:\Code\sshcode\llamaindex").load_data()

# Milvus as the vector store. `dim` must match the embedding model's output
# dimensionality (text-embedding-v3 -> 1024), or inserts will fail.
vector_store = MilvusVectorStore(
    uri="http://localhost:19530",  # network address of the Milvus server, not a local file
    collection_name="llama_demo",
    dim=1024,
)
storage_context = StorageContext.from_defaults(vector_store=vector_store)

# Embed the documents and persist the vectors into Milvus, then query.
index = VectorStoreIndex.from_documents(documents, storage_context=storage_context)
query_engine = index.as_query_engine()
res = query_engine.query("怎么退款")
print(res)