# Install Chroma with: pip install chromadb
import chromadb
from llama_index.core import SimpleDirectoryReader, StorageContext, VectorStoreIndex, Settings
# llama-index (0.6+) ships vector-store integrations as separate extras: pip install llama-index-vector-stores-chroma
from llama_index.vector_stores.chroma import ChromaVectorStore

from config.embeddings import embed_model_local_bge_small
from config.llm import deepseek_llm
from tool.file_tool import getRootPath

root_path = getRootPath()

# Load every document found under the project's data directory.
docs = SimpleDirectoryReader(root_path + '/data').load_data()

# Since llama-index v0.10.0 the global LLM and embedding model are
# configured through the Settings singleton instead of a ServiceContext.
Settings.llm = deepseek_llm()
Settings.embed_model = embed_model_local_bge_small()

# Persistent Chroma client — vectors are written to disk at this path.
chroma_client = chromadb.PersistentClient(path=root_path + '/chroma_db')

# Fetch the collection, creating it on first run.
chroma_collection = chroma_client.get_or_create_collection("quickstart")

# Expose the Chroma collection to llama-index as its vector store.
store = ChromaVectorStore(chroma_collection=chroma_collection)
ctx = StorageContext.from_defaults(vector_store=store)

# Build the index over the loaded documents, then run one sample query.
index = VectorStoreIndex.from_documents(docs, storage_context=ctx)
engine = index.as_query_engine()
answer = engine.query("员工的年假有几天？")
print(answer)