#https://blog.csdn.net/adfyvatbia/article/details/143443526
import os
from langchain_community.chat_models.tongyi import ChatTongyi
from langchain_core.messages import HumanMessage, SystemMessage
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import START, MessagesState, StateGraph
 

from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_community.chat_message_histories import SQLChatMessageHistory
from langchain_community.chat_message_histories import PostgresChatMessageHistory
from langchain_community.llms import Tongyi
from llm import model  # project-local factory module for LLM clients
# Shared chat-model instance used by the Appc chain below.
# NOTE(review): model.ty() is opaque here — presumably returns a ChatTongyi
# (the Tongyi imports above suggest so); confirm against llm/model.py.
chatLLM = model.ty()


class Appc:
    """Chat chain with Postgres-backed, per-session message history.

    The chain pairs a fixed system prompt with a rolling ``history`` of
    prior messages, so repeated invocations that share a ``session_id``
    retain conversational context across calls.
    """

    # Kept as the default for backward compatibility with the original code.
    # NOTE(review): credentials are hard-coded — move to env/config in production.
    DEFAULT_CONNECTION_STRING = "postgresql://postgres:123456@localhost:5432/aihis"

    def __init__(self, connection_string: str = DEFAULT_CONNECTION_STRING):
        """Build the history-aware chain.

        Args:
            connection_string: PostgreSQL DSN used to persist chat history.
                Defaults to the original hard-coded value.
        """
        prompt = ChatPromptTemplate.from_messages(
            [
                ("system", "你是一个行业专家"),
                MessagesPlaceholder(variable_name="history"),
                ("human", "{question}"),
            ]
        )

        # Compose prompt and model into a chat chain.
        chain = prompt | chatLLM

        # Wrap the chain so each session_id maps to its own Postgres-backed
        # message history; "question" feeds the human slot, "history" the
        # MessagesPlaceholder above.
        self.app = RunnableWithMessageHistory(
            chain,
            lambda session_id: PostgresChatMessageHistory(
                session_id=session_id, connection_string=connection_string
            ),
            input_messages_key="question",
            history_messages_key="history",
        )

    def wf(self):
        """Return the history-aware runnable chain."""
        return self.app
 
appc=Appc();

def ask(question, kid):
    """Send *question* to the chain under session id *kid* and print the reply.

    Args:
        question: The user's question text. (Parameter renamed from the
            typo "qustion"; in-file callers pass it positionally.)
        kid: Session id selecting which Postgres-backed history to use.

    Returns:
        The model response, so callers can use it programmatically
        (previously the value was printed and discarded).
    """
    chain_with_history = appc.wf()
    # "configurable.session_id" routes the call to the matching history store.
    config = {"configurable": {"session_id": kid}}
    response = chain_with_history.invoke({"question": question}, config=config)
    print(response)
    return response

def test():
    """Smoke-test: two questions on one session id, so the second call
    can draw on the history written by the first."""
    session = "1234567890"
    for q in ("桂枝汤的作用是什么？", "副作用"):
        ask(q, kid=session)

