from langchain_ollama import ChatOllama
from langchain.prompts import PromptTemplate
from langchain.chains.llm import LLMChain

# Initialize the Ollama chat model.
# NOTE(review): assumes a local Ollama server is listening on 11434 with
# the qwen2.5:0.5b model pulled — confirm before deploying elsewhere.
ollama = ChatOllama(
    base_url="http://localhost:11434",
    model="qwen2.5:0.5b",
    temperature=0.7
)

# Prompt template: instructs the model to explain a concept ({subject})
# in plain language, speaking as a teacher.
prompt = PromptTemplate(
    input_variables=["subject"],
    template="你是一位老师，请用通俗的语言讲解一下的概念：{subject}"
)

# Build the chain with LCEL composition (prompt | model), which replaces
# the deprecated LLMChain class. Removed the superseded commented-out
# LLMChain construction that previously lived here.
chain = prompt | ollama

def ask_teacher(question):
    """Send *question* through the teaching chain and return the answer text.

    The chain fills the prompt's ``subject`` slot with the question and the
    model's reply is returned as a plain string (``.content`` of the message).
    """
    payload = {"subject": question}
    return chain.invoke(payload).content

# Usage example: interactive Q&A loop on the console.
if __name__ == "__main__":
    separator = "-" * 50  # visual divider around each answer
    print("欢迎来到AI老师问答系统！(输入 'q' 退出)")
    while True:
        user_input = input("\n请输入您的问题: ").strip()
        # 'q' (case-insensitive) exits the loop.
        if user_input.lower() == 'q':
            print("感谢使用，再见！")
            break
        # Reject empty input and re-prompt.
        if not user_input:
            print("问题不能为空，请重新输入！")
            continue

        print("\n正在思考...")
        reply = ask_teacher(user_input)
        print("\n老师的回答:")
        print(separator)
        print(reply)
        print(separator)
