import os

from langchain_community.llms import Ollama
from langchain_core.prompts import PromptTemplate

# Shared LLM client. Model name and server URL are overridable via environment
# variables so the script can target a different Ollama instance without code
# changes; the defaults preserve the original hard-coded behavior.
llm = Ollama(
    model=os.environ.get("OLLAMA_MODEL", "llama3"),
    base_url=os.environ.get("OLLAMA_BASE_URL", "http://localhost:11434"),
)


def chat_with_ollama(subject: str) -> str:
    """Render the prompt template with *subject* and query the Ollama LLM.

    Loads ``prompt_template.txt`` (UTF-8) as a :class:`PromptTemplate`,
    substitutes ``subject`` into it, sends the result to the module-level
    ``llm``, and returns the model's text response.

    Args:
        subject: Value substituted for the ``{subject}`` placeholder in the
            template file.

    Returns:
        The LLM's response text.

    Raises:
        FileNotFoundError: If ``prompt_template.txt`` does not exist in the
            current working directory.
    """
    # Load the template with an explicit UTF-8 encoding so non-ASCII
    # template text is read correctly on any platform.
    template = PromptTemplate.from_file('prompt_template.txt', encoding="utf-8")
    print("=======template=========")
    print(template)
    prompt = template.format(subject=subject)
    print("=======prompt=========")
    print(prompt)
    # llm(prompt) (direct __call__) is deprecated in LangChain; invoke()
    # is the supported Runnable entry point and returns the same string.
    response = llm.invoke(prompt)
    return response
if __name__ == '__main__':
    # Demo entry point: query the model about Snow White and show the reply.
    answer = chat_with_ollama("白雪公主")
    print(answer)