import os

import streamlit as st
from langchain_community.llms import Tongyi
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate

st.set_page_config(page_title="Chatbot")

# Initialize the Tongyi model.
# SECURITY: the DashScope API key used to be hardcoded here, which leaks the
# credential to anyone with repo access. It now comes from the environment —
# set DASHSCOPE_API_KEY before running, and rotate the previously committed key.
llm = Tongyi(
    model="qwen-plus",
    dashscope_api_key=os.environ.get("DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",  # DashScope service endpoint
    temperature=0.1,
)

def clear_chat_history():
    """Reset the conversation transcript.

    The transcript is a flat list of "您：..." / "Bot：..." strings (see
    get_response), so the reset value is an empty list — resetting to a list
    of role dicts, as the old code did, would crash the later
    "\n".join(st.session_state.history) call.
    """
    st.session_state.history = []


with st.sidebar:
    st.title('Welcome To ChatBot')
    # The callback must be wired via on_click at button creation time;
    # previously the on_click button was created *inside* clear_chat_history,
    # which was never invoked, so the sidebar button did nothing.
    st.button('Clear Chat History', on_click=clear_chat_history)

# Bootstrap the multi-turn conversation: the transcript lives in session
# state as a flat list of "您：..." / "Bot：..." strings.
if "history" not in st.session_state:
    st.session_state.history = []

st.title("连续问答应用")


def get_response(user_input):
    """Generate an AI reply for *user_input* and record both turns.

    Builds a prompt from the running transcript in ``st.session_state.history``,
    invokes the model, appends the user turn and the bot turn to the transcript
    (as "您：..." / "Bot：..." strings), and returns the reply text.

    Args:
        user_input: The user's question for this turn.

    Returns:
        The model's reply as a plain string.
    """
    prompt = PromptTemplate(
        template="对话历史记录：{history}\n用户：{user_input}\nAI：",
        input_variables=["history", "user_input"],
    )

    # Compose prompt -> model -> string parser as an LCEL chain and call
    # .invoke(); the previous llm(formatted_prompt) __call__ form is
    # deprecated in modern LangChain.
    chain = prompt | llm | StrOutputParser()
    parsed_response = chain.invoke(
        {
            "history": "\n".join(st.session_state.history),
            "user_input": user_input,
        }
    )

    # Persist both sides of this turn so later prompts see the full context.
    st.session_state.history.append(f"您：{user_input}")
    st.session_state.history.append(f"Bot：{parsed_response}")

    return parsed_response


# Question input box; any non-empty submission triggers one model round-trip.
question = st.text_input("请输入您的问题：")

if question:
    answer = get_response(question)
    st.write(f"AI：{answer}")


# Render the running transcript, one line per recorded turn.
transcript = st.session_state.history
if transcript:
    st.write("对话历史记录：")
    for turn in transcript:
        st.write(turn)
