import os
#from langchain.chat_models import ChatDashScope
from langchain.llms import Tongyi
from langchain.prompts import PromptTemplate
from langchain.prompts import PromptTemplate, ChatPromptTemplate
from langchain.chains import LLMChain
from langchain.prompts import ChatPromptTemplate,MessagesPlaceholder
from langchain.memory import ConversationBufferMemory
from langchain.memory import ConversationSummaryMemory
from vtdb import adbcli
from llm import model
# NOTE(review): this rebinds the imported `llm.model` module name to the
# chat-model instance it returns — the module object itself is no longer
# reachable under this name afterwards. Consider a distinct variable name.
model = model.tychat()
 
def chain(key):
    """Build an LLMChain whose prompt and memory share the same key.

    Args:
        key: Name used both as the memory_key of the buffer memory and as
            the MessagesPlaceholder variable, so stored history is injected
            back into the prompt on each turn.

    Returns:
        A verbose LLMChain backed by a fresh ConversationBufferMemory.
    """
    chat_prompt = ChatPromptTemplate.from_messages(
        [
            ("system", "你是一个乐于助人的助手。"),
            # History is injected under the same variable name as memory_key.
            MessagesPlaceholder(variable_name=key),
            ("human", "{human_input}"),
        ]
    )

    memory = ConversationBufferMemory(
        memory_key=key,
        return_messages=True,
    )
    # Removed a stray bare `ConversationSummaryMemory` expression here:
    # it evaluated the class object and discarded it — a no-op.

    return LLMChain(
        llm=model,
        memory=memory,
        prompt=chat_prompt,
        verbose=True,
    )
 
# Per-key chain cache: repeated calls with the same key must share one
# ConversationBufferMemory so the model sees earlier turns.
_chain_cache = {}

def ask(content, key):
    """Send `content` to the conversation identified by `key`; return reply.

    Previously a brand-new chain (with empty memory) was built on every
    call, so follow-up questions lost all context even with the same key.
    The chain is now created once per key and reused.

    Args:
        content: The human turn to send to the model.
        key: Conversation/memory key identifying the session.

    Returns:
        The model's reply string (also printed for visibility).
    """
    _chain = _chain_cache.get(key)
    if _chain is None:
        _chain = _chain_cache[key] = chain(key)
    res = _chain.predict(human_input=content)
    print(res)
    return res

def test2():
    """Smoke test: two turns of one conversation must share memory.

    Returns:
        The model's reply to the follow-up question.
    """
    _chain = chain("abcdddsd")
    res = _chain.predict(human_input="桂枝汤")
    print(res)
    # Reuse the SAME chain for the follow-up. The original rebuilt the
    # chain between the two predicts, which threw away the buffer memory
    # and defeated the purpose of asking a contextual follow-up.
    res = _chain.predict(human_input="有什么副作用")
    print(res)
    return res
def test():
    """Drive a two-turn conversation through ask() under one fixed key."""
    session = "abcdddsd"
    for question in ("桂枝汤", "有什么副作用"):
        ask(question, session)

#knowledge_graph=ask(text_contents) 
 