from langchain.chains import LLMChain
# Prompt template support
from langchain import PromptTemplate
# Local LLMs served by Ollama (not Baidu Qianfan as a previous note claimed)
from langchain_community.llms import Ollama

def get_response_from_modelname(question: str, ollama_model_name: str) -> str:
    """Answer *question* with a locally served Ollama model.

    Builds a simple chain-of-thought prompt ("think step by step"), sends it
    to the Ollama model named *ollama_model_name*, and returns the model's
    raw text completion.

    Args:
        question: The user's question, interpolated into the prompt.
        ollama_model_name: Name of the model as known to the local Ollama
            server (e.g. ``'qwen:14b'``).

    Returns:
        The model's answer as a plain string.
    """
    llm = Ollama(model=ollama_model_name)

    # NOTE: runtime strings kept in Chinese — they are part of the prompt
    # the model sees, not comments.
    template = """问题: {question}
        回答: 请一步一步思考，然后回答."""

    prompt = PromptTemplate(template=template, input_variables=["question"])

    # LCEL composition replaces the deprecated LLMChain(...).run(...) path;
    # for LLM endpoints, .invoke() returns the completion string directly,
    # matching the old .run() return value.
    chain = prompt | llm
    return chain.invoke({"question": question})

def get_response_from18(question: str):
    """Convenience wrapper: answer *question* with the qwen:1.8b model."""
    model_name = 'qwen:1.8b'
    return get_response_from_modelname(question=question, ollama_model_name=model_name)


def get_response_from14(question: str):
    """Convenience wrapper: answer *question* with the qwen:14b model."""
    model_name = 'qwen:14b'
    return get_response_from_modelname(question=question, ollama_model_name=model_name)

def get_response_from32(question: str):
    """Convenience wrapper: answer *question* with the qwen:32b model."""
    model_name = 'qwen:32b'
    return get_response_from_modelname(question=question, ollama_model_name=model_name)