import os

from langchain_community.llms import Ollama
from langchain_core.prompts import PromptTemplate

# Module-level LLM client: connects to a locally running Ollama server
# (default port 11434) and uses the "llama3" model. NOTE(review):
# langchain_community.llms.Ollama is deprecated upstream in favor of
# langchain_ollama.OllamaLLM — confirm before upgrading dependencies.
llm = Ollama(model="llama3", base_url="http://localhost:11434")

def chat_with_ollama(story: str) -> str:
    """Ask the local llama3 model for a joke about *story*.

    Builds a Chinese-language prompt ("tell me a joke about {story}")
    from a PromptTemplate, sends it to the module-level ``llm``, and
    returns the model's text response.

    Args:
        story: The topic to joke about (interpolated into the template).

    Returns:
        The raw string response from the model.
    """
    # Runtime prompt text is intentionally Chinese; do not alter it.
    product_description = """给我讲个关于{story}的笑话"""
    template = PromptTemplate.from_template(product_description)
    print("=======template=========")
    print(template)
    prompt = template.format(story=story)
    print("=======prompt=========")
    print(prompt)
    # Use .invoke() — calling the LLM object directly (llm(prompt)) is the
    # deprecated __call__ interface and is removed in newer LangChain releases.
    response = llm.invoke(prompt)
    return response
if __name__ == '__main__':
    # Script entry point: generate and display a joke about Snow White.
    joke = chat_with_ollama("白雪公主")
    print(joke)