from sentence_transformers import SentenceTransformer
import chromadb
from llm.doubao import doubao_qa
from llm.local import ollama_qa

# Embedding model used for both indexing and querying; all-MiniLM-L6-v2
# produces 384-dimensional sentence embeddings.
model = SentenceTransformer("all-MiniLM-L6-v2")

# Persistent ChromaDB store on local disk; the "rag" collection is created
# on first run and reused afterwards. NOTE(review): assumes the collection
# was populated by a separate ingestion script — not visible here.
client = chromadb.PersistentClient(path="./chromadb_data")
collection = client.get_or_create_collection("rag")


def get_query_embedding(query):
    """Encode *query* with the module-level sentence-transformer model.

    Returns the embedding as a plain Python list of floats (ChromaDB's
    query API expects lists, not numpy arrays).
    """
    embedding = model.encode(query)
    return embedding.tolist()


def retrieve_related_chunks(query_embedding, n_results=3):
    """Return the top *n_results* document chunks nearest to *query_embedding*.

    Queries the module-level ChromaDB collection and returns the list of
    matched document strings for the single query embedding.

    Raises:
        LookupError: if the collection returns no documents. The original
            code called ``exit(1)`` here, which kills the whole process
            without any message — a library helper must raise instead so
            callers can decide how to handle an empty result.
    """
    results = collection.query(query_embeddings=[query_embedding], n_results=n_results)
    related_chunks = results.get("documents")
    if not related_chunks or not related_chunks[0]:
        raise LookupError("no related chunks found for the given query embedding")
    return related_chunks[0]


def retrieve_related_chunks_multidimensional(query_embedding, n_results):
    """Retrieve chunks for *query_embedding*, resolving metadata back-references.

    Each hit's metadata may carry an ``original_chunk`` entry (e.g. when the
    indexed document is a sub-split of a larger chunk); in that case the
    original chunk is returned instead of the matched document text.

    Returns:
        A list of unique chunk strings, deduplicated while PRESERVING the
        retrieval (relevance) order. The previous implementation returned
        ``list(seen_chunks)`` from a set, which discarded the ranking order
        nondeterministically.
    """
    results = collection.query(
        query_embeddings=[query_embedding],
        n_results=n_results,
        include=["documents", "metadatas", "distances"],
    )
    # `or [[]]` guards both a missing key and a None value; the old
    # `results.get("documents", [])[0]` raised IndexError when the key
    # was absent because the default [] cannot be indexed.
    documents = (results.get("documents") or [[]])[0]
    metadatas = (results.get("metadatas") or [[]])[0]

    original_chunks = []
    for doc, metadata in zip(documents, metadatas):
        if metadata and "original_chunk" in metadata:
            original_chunks.append(metadata["original_chunk"])
        else:
            original_chunks.append(doc)

    # dict.fromkeys keeps first-seen (i.e. most relevant) order while
    # removing duplicates.
    return list(dict.fromkeys(original_chunks))


if __name__ == "__main__":

    def main():
        """Run one end-to-end RAG query against the local collection."""
        query = "张伟的期望工资是多少?"
        query_embedding = get_query_embedding(query)
        print(f"query_embedding:{len(query_embedding)}")

        related_chunks = retrieve_related_chunks_multidimensional(
            query_embedding, n_results=5
        )
        print("related_chunks", related_chunks)

        query_context = "\n".join(related_chunks)
        print(f"query_context:{query_context}")

        prompt = f"已知信息:{query_context}\n\n请根据上述的内容回答用户下面的问题:{query}"
        print(f"prompt:{prompt}")

        answer = doubao_qa(prompt)
        print(f"answer:{answer}")

    main()
