from database import get_faiss, get_cursor
from call_model import call_embedding_model, call_llm_model


# Locations of the persisted FAISS vector index and the SQLite document store.
faiss_path = "/workspace/database/index.faiss"
dataset_path = '/workspace/database/dataset.db'

# Module-level handles shared by search(): the loaded FAISS index and an open
# SQLite connection + cursor. NOTE(review): these are opened at import time and
# kept open for the module's lifetime — confirm that is the intended lifecycle.
index = get_faiss(faiss_path)
conn, cursor = get_cursor(dataset_path)



def build_prompt(query, contents):
    """Assemble the LLM prompt: instruction header, numbered reference
    documents, the user question, and a closing instruction.

    query    -- the user's question (string).
    contents -- iterable of retrieved document texts, listed in order.
    Returns the full prompt as a single string.
    """
    sections = ["你是一个问答助手，请根据以下资料回答问题。\n\n"]
    sections.extend(
        f"资料{num}：{text}\n\n" for num, text in enumerate(contents, start=1)
    )
    sections.append(f"问题：{query}\n\n")
    sections.append("请基于以上资料给出准确简洁的回答。\n")
    return "".join(sections)

def search(query_text):
    """Run retrieval-augmented QA for *query_text* and print the answer.

    Pipeline: embed the query, fetch the 3 nearest neighbours from the
    module-level FAISS index, look up their rows in the SQLite ``origin``
    table, build a prompt from the retrieved texts, and print the LLM's
    answer together with the source paths.

    Returns False when the embedding model fails; otherwise returns None.
    """
    query_embedding = call_embedding_model(query_text)
    if query_embedding is None:
        # Original code also did an unused `import sys` here — removed.
        print("embedding模型执行错误")
        return False

    # Retrieve the 3 nearest neighbours (D = distances, I = row ids).
    D, I = index.search(query_embedding, k=3)
    print(D, I)

    retrieved_content = []
    retrieved_path = []
    for id_ in I[0]:
        # Parameterized query — id comes from FAISS, cast to plain int for sqlite.
        cursor.execute("SELECT * FROM origin WHERE id = ?", (int(id_),))
        row = cursor.fetchone()
        if row is None:
            # FAISS pads with id -1 when fewer than k vectors exist, and a
            # stale index may reference deleted rows — skip instead of crashing.
            continue
        retrieved_path.append(row[1])    # column 1: source document path
        retrieved_content.append(row[2])  # column 2: document text

    # Bug fix: the original called conn.close() here, closing the shared
    # module-level connection so any second search() call would fail.
    # The connection is left open; its lifetime belongs to the module.

    prompt = build_prompt(query_text, retrieved_content)

    response = call_llm_model(prompt)
    print("\nAnswer:\n", response)
    # Bug fix: original wrote "\参考资料" — "\参" is not a valid escape, so a
    # literal backslash was printed instead of the intended newline.
    print("\n参考资料:\n", retrieved_path)


# Demo retrieval query. Guarded so that importing this module (e.g. to reuse
# build_prompt or search) does not trigger a query at import time.
if __name__ == "__main__":
    # Alternative example: query_text = "参数查询的流程是什么？"
    query_text = "探亲假怎么请？"
    search(query_text)