from langchain_chroma import Chroma
from langchain_core.documents import Document
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnableLambda, RunnablePassthrough
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_openai import ChatOpenAI
import os

# Local path to the sentence-transformers embedding model.
# Generalized: allow overriding via EMBED_MODEL_DIR so the script is not
# tied to one machine's Windows drive layout; the original path remains
# the default, so existing behavior is unchanged.
model_dir = os.environ.get(
    "EMBED_MODEL_DIR",
    "D:\\my_models\\sentence-transformers\\all-MiniLM-L6-v2",
)

# Toy knowledge base: four short documents to index into the vector store.
# `metadata["source"]` labels where each snippet came from.
documents = [
    Document(
        page_content="猫是一种温柔的动物，喜欢吃鱼，叫声【喵喵喵】",
        metadata={"source": "猫的介绍"}
    ),
    Document(
        page_content="陈李哈哈哈公司，早上9:00上班，晚上18:00下班。中午吃饭午休时间为：12:00-13:00",
        metadata={"source": "公司章程"}
    ),
    Document(
        page_content="我们公司的名字是陈李哈哈哈，主营业务为卖水果。",
        metadata={"source": "公司简介"}
    ),
    Document(
        page_content="陈李哈哈哈公司，在华三村证券交易所上市，每股单价10元，总股本为1000股。",
        metadata={"source": "公司上市情况"}
    )
]

# Build the embedding function from the local sentence-transformers model.
embeddings = HuggingFaceEmbeddings(model_name=model_dir)

try:
    # Embed all documents and load them into an in-memory Chroma store.
    vector_store = Chroma.from_documents(documents, embeddings)
except Exception as e:
    print(f"发生错误: {e}")
    import traceback

    traceback.print_exc()
    # BUG FIX: the original swallowed the error and fell through, so the
    # next line crashed with a confusing `NameError: vector_store`.
    # Re-raise so the script stops with the real cause.
    raise

# Wrap similarity_search as a Runnable; bind k=3 so every retrieval
# returns the top-3 most similar documents.
retriever = RunnableLambda(vector_store.similarity_search).bind(k=3)

# Prompt template: the retrieved documents fill {context} and the user's
# query fills {question}, combined into a single human message.
message = """
使用提供的上下文回答这个问题：{question}。上下文：{context}
"""

chat_messages = [("human", message)]
prompt_template = ChatPromptTemplate.from_messages(chat_messages)

# 创建model
base_url = os.environ.get("BAOCLOUD_DS_BASE_URL")
model_name = os.environ.get("BAOCLOUD_DS_MODEL_NAME")
os.environ["OPENAI_API_KEY"] = os.getenv("BAOCLOUD_DS_API_KEY")

model = ChatOpenAI(base_url=base_url, model_name=model_name)

# 得到Chain
chain = {"question": RunnablePassthrough(), "context": retriever} | prompt_template | model

print(chain.invoke("介绍一下陈李哈哈哈公司").content)
