import os

from langchain.globals import set_debug, get_debug
from langchain_chroma import Chroma
from langchain_community.embeddings import ModelScopeEmbeddings
from langchain_core.documents import Document
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnableLambda, RunnablePassthrough
from langchain_openai import OpenAIEmbeddings, ChatOpenAI

# from langchain_openai import OpenAIEmbeddings
# from modelscope import AutoModel, AutoTokenizer
# import torch
# Turn on LangChain's global debug mode: every chain/retriever/LLM call below
# will print its inputs and outputs, which is useful for tracing this demo.
set_debug(True)
# Toy in-memory corpus: five one-sentence pet facts, each tagged with a
# coarse source category used as document metadata.
_pet_facts = [
    ("Dogs are great companions, known for their loyalty and friendliness.", "mammal-pets-doc"),
    ("Cats are independent pets that often enjoy their own space.", "mammal-pets-doc"),
    ("Goldfish are popular pets for beginners, requiring relatively simple care.", "fish-pets-doc"),
    ("Parrots are intelligent birds capable of mimicking human speech.", "bird-pets-doc"),
    ("Rabbits are social animals that need plenty of space to hop around.", "mammal-pets-doc"),
]

documents = [
    Document(page_content=text, metadata={"source": source})
    for text, source in _pet_facts
]

# Embedding model used to vectorize the toy documents.
# SECURITY FIX: the API key was previously hardcoded in source. Read it from
# the OPENAI_API_KEY environment variable instead; a missing variable raises
# KeyError early rather than silently shipping a leaked secret.
openAIEmbeddings = OpenAIEmbeddings(
    api_key=os.environ["OPENAI_API_KEY"],
    model='text-embedding-3-small',
    base_url="https://www.henapi.top/v1",
)

# Build an in-memory Chroma vector store over `documents` (defined above),
# embedding each page_content with the model configured above.
vectorstore = Chroma.from_documents(documents, embedding=openAIEmbeddings)

# Expose the vector store as a retriever that returns only the single best
# similarity match (k=1) for each query string.
# (Removed the commented-out similarity_search / batch experiments that were
# left here — dead code should not live in source control.)
retriever = vectorstore.as_retriever(search_type="similarity", search_kwargs={"k": 1})

# Chat model that answers the question from retrieved context.
# SECURITY FIX: the API key was previously hardcoded in source; it now comes
# from the OPENAI_API_KEY environment variable (same provider endpoint).
llm = ChatOpenAI(
    api_key=os.environ["OPENAI_API_KEY"],
    model='deepseek-ai/DeepSeek-V3',
    base_url="https://www.henapi.top/v1",
)

# Prompt that constrains the model to answer only from the supplied context.
message = """
Answer this question using the provided context only.

{question}

Context:
{context}
"""

prompt = ChatPromptTemplate.from_messages([("human", message)])

# RAG pipeline: the input question is sent to the retriever (-> {context})
# and simultaneously passed through unchanged (-> {question}); both fill the
# prompt, whose messages are then sent to the LLM.
rag_chain = {"context": retriever, "question": RunnablePassthrough()} | prompt | llm

response = rag_chain.invoke("tell me about cats")

print(response.content)