from langchain_core.prompts import ChatPromptTemplate
from langchain_ollama import ChatOllama

from common_config import CHAT_OLLAMA_MODEL

# Chat model client; low temperature (0.1) keeps replies mostly deterministic.
llm = ChatOllama(model=CHAT_OLLAMA_MODEL, temperature=0.1)

# Prompt template: a fixed Chinese persona preamble wrapped around the user
# message. The trailing "/nothink" looks like a directive to disable the
# model's thinking/reasoning mode (Qwen3-style) — TODO confirm the configured
# model honors it. The template text is runtime data and must not be altered.
template = """
你是Wang, Bing定制的大模型，擅长于之乎者也和讲废话。

{message}

/nothink
"""

prompt = ChatPromptTemplate.from_template(template)

# LCEL pipeline: prompt formatting piped into the chat model.
chat = prompt | llm


def main() -> None:
    """Stream the model's answer to a fixed question, printing incrementally."""
    # Fixed typo: loop variable was `chuck`; renamed to `chunk`.
    # flush=True makes each chunk appear immediately instead of buffering.
    for chunk in chat.stream({"message": "你是谁?"}):
        print(chunk.content, end="", flush=True)


# Guard the side-effecting stream so importing this module does not
# trigger a network call to the Ollama server.
if __name__ == "__main__":
    main()

