from langchain_openai import OpenAIEmbeddings
from langchain_redis import RedisConfig, RedisVectorStore
from langchain_core.prompts import ChatPromptTemplate

from config.load_key import load_key

# Embedding model served through an OpenAI-compatible endpoint.
# NOTE(review): the base_url/api_key keys suggest a SiliconFlow-hosted
# Qwen embedding model — confirm the key names exist in the local key store.
embedding_model = OpenAIEmbeddings(
    model="Qwen/Qwen3-Embedding-0.6B",
    api_key=load_key("siliconflow_api_key"),
    base_url=load_key("siliconflow_base_url"),
)

# Persist the vector data in Redis.
config = RedisConfig(
    index_name="langchain_test_index",
    redis_url=load_key("REDIS_URL"),
)
# NOTE(review): config is passed positionally as the second argument;
# confirm the installed langchain_redis version accepts it that way
# (newer releases document `config=` as a keyword).
vector_store = RedisVectorStore(embedding_model, config)
# Seed the index with three short demo sentences.
vector_store.add_texts(["香蕉很长", "苹果很甜", "西瓜又大又圆"])
# Similarity search returning (Document, score) pairs; whether a lower or
# higher score is "better" depends on the index's distance metric — verify.
scored_results = vector_store.similarity_search_with_score("西瓜", k=3)
for doc, score in scored_results:
    print(f"{doc.page_content} - {score}")

# Build a retriever: search type "similarity", returning up to 3 documents.
retriever = vector_store.as_retriever(search_type="similarity", search_kwargs={"k": 3})
print(retriever.invoke("长长的水果是什么？"))





# Chain-style usage of the retriever (LCEL pipeline).
# Prompt template with a single human-message slot filled from {question}.
prompt = ChatPromptTemplate.from_messages([("human", "{question}")])


def format_prompt_value(prompt_value):
    """Convert a PromptValue into the plain string a retriever expects.

    ``prompt.invoke`` returns a PromptValue, while ``retriever.invoke``
    takes a string, so this adapter bridges the two when piped together.
    """
    text = prompt_value.to_string()
    return text

# Compose the pipeline with LCEL pipe syntax:
# prompt -> PromptValue-to-string adapter -> retriever.
retriever_chain = prompt | format_prompt_value | retriever

# Invoke end-to-end: fills the template, converts to a string query,
# and returns the retrieved Documents from the Redis index.
documents = retriever_chain.invoke({"question": "又长又甜的水果是什么？"})
for doc in documents:
    print(doc.page_content)


