from langchain.chains.conversation.base import ConversationChain
from langchain.memory import ConversationBufferMemory
from langchain_community.llms.ollama import Ollama
from wxauto import *

# wx = WeChat()  # get the currently running WeChat client
# wx.GetSessionList()  # fetch the list of chat sessions

# Locally available Ollama model names; the client below is pinned to
# one of them by index.
model = [
    'phi3',
    'llava-phi3',
    'dolphin-llama3',
]

# Client for the local Ollama server (default port 11434), using
# 'dolphin-llama3' (model[2]).
ollama = Ollama(base_url='http://localhost:11434', model=model[2])

# Conversation memory intended for a ConversationChain; not read by the
# keep-alive loop visible in this file.
memory = ConversationBufferMemory()

import time


def activate_function():
    """Keep-alive ping: send an empty prompt to the Ollama model.

    Invoking the model periodically keeps it loaded on the Ollama
    server, so later real requests avoid the model-load latency.
    Failures (e.g. the server being unreachable) are reported and
    swallowed so the caller's polling loop keeps running.
    """
    print("刷新")
    try:
        ollama.invoke('')
    except Exception as exc:  # best-effort ping: never crash the loop
        print(f"keep-alive ping failed: {exc!r}")


def _run_keepalive_loop():
    """Ping the model forever, once every 300 seconds (5 minutes)."""
    while True:
        activate_function()
        time.sleep(300)


if __name__ == '__main__':
    _run_keepalive_loop()
