from langchain_community.chat_models import ChatTongyi
from langchain_core.prompts.prompt import PromptTemplate
from langchain.chains import (TransformChain, LLMChain, SimpleSequentialChain)

import os

# SECURITY(review): a live-looking DashScope API key was hard-coded here and
# committed to source control — it must be rotated and supplied through the
# environment. `setdefault` keeps the demo runnable while letting a real
# environment value take precedence over the embedded fallback.
os.environ.setdefault("DASHSCOPE_API_KEY", "sk-9d8f1914800e497f8717144e860f99bc")

# Model: Alibaba Tongyi chat model (reads DASHSCOPE_API_KEY from the environment).
llm = ChatTongyi()


# Transform step for the TransformChain.
def transform_func(inputs: dict) -> dict:
    """Discard the incoming text and substitute a fixed query string.

    Args:
        inputs: Chain inputs; must contain the key ``"text"``.

    Returns:
        ``{"output_text": "印度历届总统"}`` regardless of the input text.

    Raises:
        KeyError: If ``inputs`` has no ``"text"`` key.
    """
    # The original code bound inputs["text"] to an unused local, hiding the
    # fact that the input is ignored. Keep the lookup (it validates the key
    # is present) but make the discard explicit — presumably a demo stub.
    _ = inputs["text"]
    return {"output_text": "印度历届总统"}


# Prompt applied to the transform step's output.
template = """总结这些文本,告诉我深层次意义:
{output_text}
总结:"""
prompt = PromptTemplate(input_variables=["output_text"], template=template)

# Transform-only chain: runs transform_func without calling the model.
# BUG FIX: TransformChain declares no `prompt` field — the original passed
# `prompt=prompt`, which pydantic either silently drops or rejects with a
# ValidationError. The prompt belongs to the downstream LLMChain only.
transform_chain = TransformChain(
    input_variables=["text"],
    output_variables=["output_text"],
    transform=transform_func,
)
# 加上llmChain、顺序链
# LLM chain: feeds the transform step's output_text into the prompt above.
llm_chain = LLMChain(llm=llm, prompt=prompt)

# Run transform -> LLM in order; verbose=True prints each intermediate step.
sequential_chain = SimpleSequentialChain(
    chains=[transform_chain, llm_chain],
    input_key="text",
    output_key="Comment",
    verbose=True,
)

# A bare string is accepted because the chain has exactly one input key.
res = sequential_chain.invoke("巴西历届总统")
print(res)
