import os

# --- Langfuse telemetry configuration ----------------------------------------
# SECURITY: these credentials were hard-coded (and duplicated in plain-text
# comments) in source control, so they must be considered leaked — rotate them
# in the Langfuse dashboard and supply replacements via the environment or a
# .env file instead of editing this file.
# setdefault (rather than plain assignment) lets an externally provided value
# take precedence over the checked-in fallback.
os.environ.setdefault('LANGFUSE_SECRET_KEY', 'sk-lf-a6e41239-5d2d-4991-a218-fc2baa41a417')
os.environ.setdefault('LANGFUSE_PUBLIC_KEY', 'pk-lf-32069f3f-ed6e-4d0b-b385-d03847f4bf18')
os.environ.setdefault('LANGFUSE_HOST', 'https://us.cloud.langfuse.com')

import chromadb

from llama_index.core import VectorStoreIndex,StorageContext
from llama_index.core import SimpleDirectoryReader,Settings
from llama_index.core.node_parser import SentenceSplitter
from llama_index.vector_stores.chroma import ChromaVectorStore
from llama_index.llms.ollama import Ollama
from llama_index.embeddings.ollama import OllamaEmbedding
from llama_index.core.callbacks import (
    CallbackManager,
    LlamaDebugHandler,
CBEvent
)

# --- Model configuration ------------------------------------------------------
# Register a local Ollama chat model and a Chinese embedding model as the
# process-wide defaults via llama_index's global Settings object.
Settings.llm = Ollama(model="qwen:1.8b")
Settings.embed_model = OllamaEmbedding(model_name="milkey/dmeta-embedding-zh:f16")

# Debug handler that prints a trace of each pipeline run when it completes;
# wrapped in a CallbackManager so it can be attached to an index.
llama_debug = LlamaDebugHandler(print_trace_on_end=True)
callback_manager = CallbackManager([llama_debug])

# --- Document ingestion -------------------------------------------------------
# Read the resume text file from disk.
documents = SimpleDirectoryReader(
    input_files=[r"F:\datas\nlp\jianli\jianli.txt"]
).load_data()

# Chunk the documents into ~500-character pieces with a 20-character overlap.
splitter = SentenceSplitter(chunk_size=500, chunk_overlap=20)
nodes = splitter.get_nodes_from_documents(documents, show_progress=False)

# Discard any events the debug handler accumulated while parsing.
llama_debug.flush_event_logs()
# --- Vector store -------------------------------------------------------------
# Connect to a Chroma server on localhost and reuse (or create) the "ragdb"
# collection, configured for cosine similarity.
chroma_client = chromadb.HttpClient(host="localhost", port=8000)
# chroma_client.delete_collection(name="ragdb")  # uncomment to rebuild from scratch
ragdb_collection = chroma_client.get_or_create_collection(
    name="ragdb", metadata={"hnsw:space": "cosine"}
)
vector_store = ChromaVectorStore(chroma_collection=ragdb_collection)

# --- Index construction -------------------------------------------------------
# Build ONE index over the pre-split nodes, backed by the Chroma vector store.
# The original code additionally called VectorStoreIndex.from_documents() to
# build a second, in-memory index just to run a debug retrieval — embedding the
# whole corpus twice. Attaching the callback manager to the single index gives
# the same tracing without the duplicate embedding pass.
storage_context = StorageContext.from_defaults(vector_store=vector_store)
index = VectorStoreIndex(
    nodes,
    storage_context=storage_context,
    callback_manager=callback_manager,
)
idx = index  # alias kept for backward compatibility with the old second index
print(idx.as_retriever().retrieve("vllm推理工具"))

# --- Interactive query loop ---------------------------------------------------
query_engine = index.as_query_engine()

while True:
    try:
        user_input = input("问题：").strip()  # strip so "exit " still exits
    except (EOFError, KeyboardInterrupt):
        # Exit cleanly on Ctrl-D / Ctrl-C instead of dumping a traceback.
        break
    if not user_input:
        # Skip blank lines rather than sending an empty query to the engine.
        continue
    if user_input.lower() == "exit":
        break

    response = query_engine.query(user_input)
    print("AI助手：", response.response)
