from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain.prompts import ChatPromptTemplate

# Load environment variables (e.g. the API key read by ChatOpenAI) before
# any client object is constructed.
load_dotenv()

# System prompt: single text file, whitespace-trimmed.
with open('res/sys_prompt.txt', 'r', encoding='utf-8') as f:
    sys_prompt = f.read().strip()

# The end-user's chat text, appended as the final 'human' message.
with open('res/user_chat_text.txt', 'r', encoding='utf-8') as f:
    user_chat_text = f.read().strip()

# Build the message list: one 'human' message per non-blank line of the
# format prompt. A file object is directly iterable line-by-line, so the
# manual readline()/while loop is unnecessary.
with open('res/fmt_prompt.txt', 'r', encoding='utf-8') as f:
    prompt_ls = [('human', line.strip()) for line in f if line.strip()]

# Final ordering: system prompt first, format instructions in the middle,
# user chat text last.
prompt_ls.insert(0, ('system', sys_prompt))
prompt_ls.append(('human', user_chat_text))

# Template with placeholders (e.g. {sector}) filled in at invoke() time.
prompt_template = ChatPromptTemplate.from_messages(prompt_ls)

# Chat model pointed at Zhipu's OpenAI-compatible endpoint (GLM-4).
# Low temperature keeps answers focused; max_tokens caps the reply length.
model = ChatOpenAI(
    openai_api_base="https://open.bigmodel.cn/api/paas/v4",
    model='glm-4-0520',
    temperature=0.3,
    max_tokens=300,
)

# LCEL pipeline: fill the prompt template, then send it to the model.
chain = prompt_template | model


if __name__ == '__main__':
    # Fill the {sector} placeholder and run the chain; the Runnable API
    # accepts the input mapping positionally.
    response = chain.invoke({'sector': '手表行业'})
    print(response.content)
