from langchain_community.vectorstores import Chroma
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough

from models import get_ds_model_client, get_ali_embedding_model_client
from langchain_core.output_parsers import JsonOutputParser, StrOutputParser

# Chat-model client (DeepSeek) shared by the example chains below.
client = get_ds_model_client()


# Streaming string output example (prints tokens as they arrive):
# chunks = []
# for chunk in client.stream("今天重庆天气怎么样?"):
#     chunks.append(chunk)
#     print(chunk.content, end="")


# Streaming JSON output: helper that pulls country names out of (possibly partial) parses
def _extract_country_names(input):
    if not isinstance(input, dict):
        return ""
    if "countries" not in input:
        return ""
    countries = input["countries"]
    if not isinstance(countries, list):
        return ""
    country_names = [
        country["name"] for country in countries if isinstance(country, dict)
    ]
    return "\n".join(country_names)


# chain = (
#     client | JsonOutputParser() | _extract_country_names
# )  # Due to a bug in older versions of Langchain, JsonOutputParser did not stream results from some models
# for text in chain.stream(
#     "output a list of two countries france, spain and japan and their populations in JSON format. "
#     'Use a dict with an outer key of "countries" which contains a list of countries. '
#     "Each country should have the key `name` and `population`"
# ):
#     print(text, flush=True)

# Retrieval-augmented chain: fetch context from the vector store, fill the
# prompt, and stream the model's answer.
template = """Answer the question based only on the following context:
{context}

Question: {question}
"""
prompt = ChatPromptTemplate.from_template(template)
# Tiny in-memory Chroma store seeded with two demo facts.
vectorstore = Chroma.from_texts(
    ["harrison worked at kensho", "harrison likes spicy food"],
    embedding=get_ali_embedding_model_client(),
)
retriever = vectorstore.as_retriever()
# chunks = [chunk for chunk in retriever.stream("where did harrison work?")]
# print(chunks)

# The dict fans the single input question out to both prompt variables:
# "question" passes through unchanged, "context" goes through the retriever.
retriever_chain = {
                    "question": RunnablePassthrough(),
                    "context": retriever
                  } | prompt | client | StrOutputParser()
# Use .stream() so chunks arrive incrementally. The previous .invoke() returned
# the complete answer string, and iterating over a str yields single CHARACTERS,
# printing one character per line instead of streaming tokens.
for chunk in retriever_chain.stream("where did harrison work?"):
    print(chunk, end="", flush=True)