import os

from langchain_chroma import Chroma
from langchain_core.documents import Document
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnableLambda, RunnablePassthrough
from langchain_openai import ChatOpenAI, OpenAIEmbeddings

# LangSmith tracing/monitoring configuration.
# NOTE(review): '1123' is clearly a placeholder key — with tracing enabled,
# LangSmith upload calls will fail authentication until a real key is set;
# prefer reading it from the environment rather than hard-coding it here.

os.environ['LANGCHAIN_TRACING_V2'] = "true"
os.environ['LANGCHAIN_API_KEY'] = '1123'

# Instantiate the chat model (expects OPENAI_API_KEY in the environment).

model = ChatOpenAI(model='gpt-4-turbo')

# Tiny in-memory corpus for the retrieval demo: one Document per animal,
# all tagged with the same metadata source. Text content (including its
# original spelling) is kept exactly as authored.
_ANIMAL_FACTS = [
    "dog is good boy, be famous for his zhongcheng",
    "cat is good boy, be famous for his optimiaze",
    "duck is good boy, be famous for his hangsome",
]

documents = [
    Document(page_content=fact, metadata={"source": " animals document"})
    for fact in _ANIMAL_FACTS
]

# Build an in-memory vector store from the documents using LangChain's
# built-in Chroma integration; OpenAI embeddings enable cosine-similarity
# search over the corpus.
vector_stores = Chroma.from_documents(documents, embedding=OpenAIEmbeddings())
# vector_stores.similarity_search_with_score('coffee cat')  # lower score = closer match

retriver = RunnableLambda(vector_stores.similarity_search).bind(k=1)  # return only the single most-similar document

# print(retriver.batch(['coffee cat','white dog']))

# Combine the LLM with vector-store retrieval (a minimal RAG chain).
# Prompt template: the user question and the retrieved documents are
# injected into {question} and {context} respectively.
message = """
使用上下文仅回答这个问题：
{question}
上下文：
{context}
"""

# BUG FIX: the classmethod is `from_messages` (plural); `from_message`
# does not exist on ChatPromptTemplate and raised AttributeError here.
prompt_template = ChatPromptTemplate.from_messages([
    ('human', message),
])

# RunnablePassthrough forwards the user's question unchanged into the
# prompt, while the retriever fills {context} with the matching document.
# NOTE(review): the retriever yields a list of Document objects, which is
# stringified wholesale into the prompt — consider formatting just the
# page_content of each hit for a cleaner context.
chain = {'question': RunnablePassthrough(), 'context': retriver} | prompt_template | model

resp = chain.invoke('please introduce cat')
