# -*- coding: utf-8 -*-
"""
作者: 
日期: 年月日
"""
# from langchain.chat_models import ChatOpenAI  # deprecated import path; moved to langchain_community
from langchain_community.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain


def return_llm_stream(query, model="Qwen1.5-7B-Chat", temperature=0.7,
                      api_key="EMPTY",
                      base_url="http://192.168.0.194:20000/v1",
                      max_tokens=2048):
    """Stream chat-completion chunks for *query* from an OpenAI-compatible server.

    Previously all connection settings were hard-coded; they are now keyword
    parameters whose defaults match the original values, so existing callers
    (``return_llm_stream(q)``) behave exactly as before.

    Args:
        query: The user question substituted into the prompt template.
        model: Model name served by the endpoint.
        temperature: Sampling temperature.
        api_key: API key; the local deployment accepts the placeholder "EMPTY".
        base_url: Base URL of the OpenAI-compatible API server.
        max_tokens: Maximum number of tokens to generate.

    Yields:
        Message chunks (presumably ``AIMessageChunk`` — confirm against the
        installed langchain version) as they arrive from the server.
    """
    llm = ChatOpenAI(model=model, temperature=temperature, api_key=api_key,
                     base_url=base_url, streaming=True, max_tokens=max_tokens)

    # User-facing Chinese prompt template — kept verbatim.
    prompt = PromptTemplate(
        input_variables=["query"],
        template="你是一个专业的AI助手，请回答{query}",
    )

    # LCEL composition: prompt -> llm (supersedes the unused LLMChain import).
    llm_chain = prompt | llm

    # NOTE(review): the original printed every chunk to stdout; that debug
    # side effect has been removed — callers consume the generator instead.
    for token in llm_chain.stream({'query': query}):
        yield token
