# from knowledge_based_chatglm import answer_chatglm, answer_chatglm_private
# 本文件在接入时请勿乱来
import gradio as gr

from Server import DJModel1, DJModel2, OrgModel1, OrgModel2
import nltk

nltk.data.path.append('nltk_data')


# 这是开启前端界面
def answer(ask, history, choose, choose_model):
    if len(history) > 16:
        history.pop(0)

    if choose == "chatglm-pro":
        if choose_model == "党建助手":
            return DJModel1.answer(ask, history)
        elif choose_model == "原生模式":
            return OrgModel1.answer(ask,ask, history)
    elif choose == "chatgpt3":
        if choose_model == "党建助手":
            return DJModel1.answer(ask, history)
        elif choose_model == "原生模式":
            return OrgModel1.answer(ask,ask, history)
    # elif choose == "文心一言":
    #     return botmodel(enire, choose_model, ask, history)
    # elif choose == "通义千问":
    #     return botmodel(qianwen, choose_model, ask, history)


def botmodel(chatbot, choose_model, ask, history):
    """Forward *ask* and *history* to *chatbot*'s answer().

    *choose_model* is accepted for interface parity with the dispatch in
    answer() but is not consulted here.
    """
    reply = chatbot.answer(ask, history)
    return reply


def startgr(port):
    """Build the Gradio chat interface and serve it on *port*."""
    with gr.Blocks() as demo:
        chat_window = gr.Chatbot()
        user_input = gr.Textbox()
        base_model = gr.Dropdown(
            ["chatglm-pro", "chatgpt3"],
            label="选择基础模型",
            value="chatglm-pro",
        )
        mode = gr.Dropdown(
            ["党建助手", "原生模式"],
            label="咨询模式",
            value="党建助手",
        )

        def respond(message, history, choose, choose_model):
            # Stream each updated history state back to the UI while
            # clearing the input textbox.
            for chat_history in answer(message, history, choose, choose_model):
                yield "", chat_history

        user_input.submit(
            respond,
            [user_input, chat_window, base_model, mode],
            [user_input, chat_window],
        )

    # Bind on all interfaces so the UI is reachable from outside the host.
    demo.launch(server_name="0.0.0.0", server_port=port)


if __name__ == "__main__":
    # Guard the entry point so importing this module does not
    # immediately launch the web server.
    startgr(7860)
