from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory
from langchain.llms import Ollama
from typing import Optional
from pydantic import BaseModel, Field


# Initialize the Ollama LLM client against a fixed remote endpoint.
# NOTE(review): `langchain.llms.Ollama` is deprecated in newer LangChain
# releases (moved to langchain_community / langchain_ollama) — confirm the
# pinned langchain version before upgrading.
llm = Ollama(base_url="https://test-pod67a308ea653fb15d7474d3d4-11434.node.inscode.run", model="deepseek-r1:70b")

# Conversation memory: buffers the full chat history and replays it into
# every prompt sent to the model.
memory = ConversationBufferMemory()

# Conversation chain wiring the LLM and memory together; driven by chat().
conversation = ConversationChain(llm=llm, memory=memory)


class Joke(BaseModel):
    """Structured joke to tell the user.

    NOTE(review): declared but not referenced anywhere in this file —
    presumably intended for structured LLM output; confirm before removing.
    """

    # The setup line of the joke.
    setup: str = Field(description="The setup of the joke")
    # The punchline delivered after the setup.
    punchline: str = Field(description="The punchline to the joke")
    # Optional self-rating on a 1-10 scale; None when no rating is given.
    rating: Optional[int] = Field(
        default=None, description="How funny the joke is, from 1 to 10"
    )

# 终端交互函数
def chat():
    print("欢迎使用 Ollama 终端聊天助手！输入 '退出' 结束对话。")
    while True:
        # 获取用户输入
        user_input = input("你: ")
        if user_input.lower() in ["退出", "exit", "quit"]:
            print("助手: 再见！")
            break

        # 调用对话链
        response = conversation.run(user_input)
        print(f"助手: {response}")


# Script entry point: start the interactive chat loop.
if __name__ == "__main__":
    chat()
