# -*- coding:utf-8 -*-
import gradio as gr
import logging
from langchain.chat_models import ChatOpenAI
from langchain.chat_models.base import BaseChatModel
from langchain.chat_models import ChatOpenAI
from langchain import PromptTemplate, LLMChain
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    AIMessagePromptTemplate,
    HumanMessagePromptTemplate,
)
from langchain.schema import (
    AIMessage,
    HumanMessage,
    SystemMessage
)

import openai, random, time
# Demo version string rendered into the page footer.
__version__ = "0.0.1"
# Point the OpenAI client at a local OpenAI-compatible server (e.g. llmapi).
openai.api_base = "http://localhost:8080/v1"
# Placeholder token accepted by the local server — not a real OpenAI key.
api_key = "token1"
# Model names exposed by the backend; the first entry is used as the default.
MODELS = ["ggml-vicuna-13b-1.1"]
# HTML footer template; {versions} is filled with __version__ below.
FOOTER = """<div class="versions">{versions}</div>"""

        
def get_chain(
    model_name,
    api_key=api_key,
    temperature=0.1,
    system_prompt_tmpls="You are a helpful assistant that translates english to pirate.",
    user_name=""
) -> LLMChain:
    """Build an LLMChain wrapping a ChatOpenAI model behind a few-shot chat prompt.

    The prompt stacks four messages: the system instruction, one hard-coded
    human/AI example exchange, and the live user turn filled via the
    ``{text}`` template variable. ``user_name`` is currently unused and is
    kept only for interface compatibility.
    """
    llm = ChatOpenAI(
        model_name=model_name,
        verbose=True,
        temperature=temperature,
        openai_api_key=api_key,
    )
    # Few-shot scaffold: system rule, one worked example, then the real input.
    prompt_messages = [
        SystemMessagePromptTemplate.from_template(system_prompt_tmpls),
        HumanMessagePromptTemplate.from_template("Hi"),
        AIMessagePromptTemplate.from_template("Argh me mateys"),
        HumanMessagePromptTemplate.from_template("{text}"),
    ]
    return LLMChain(llm=llm, prompt=ChatPromptTemplate.from_messages(prompt_messages))

def user(user_message, history):
    """Stage a user turn for the chatbot.

    Returns an empty string (to clear the input textbox) and a NEW history
    list with the pending ``[message, None]`` row appended; the caller's
    list is left unmodified.
    """
    staged = list(history)
    staged.append([user_message, None])
    return "", staged

def predict(model_name, message, chat_history):
    """Run one chat turn.

    Sends ``message`` through a chain for ``model_name``, appends the
    ``(message, reply)`` pair to ``chat_history`` in place, and returns
    an empty string (clearing the input box) plus the updated history.

    NOTE(review): a new LLMChain is constructed on every call; caching it
    would avoid rebuilding the prompt each turn — confirm no per-call
    model/temperature changes are expected before hoisting.
    """
    reply = get_chain(model_name=model_name).run(message)
    chat_history.append((message, reply))
    return "", chat_history

with gr.Blocks(theme=gr.themes.Soft()) as demo:
    # Per-session state holders.
    user_api_key = gr.State(api_key)
    user_question = gr.State("")
    model_name = gr.State(MODELS[0])
    with gr.Row():
        gr.HTML("对话测试", elem_id="app_title")
    with gr.Row().style(equal_height=True):
        with gr.Column(scale=5):
            with gr.Row():
                chatbot = gr.Chatbot(label="Demo Chat", elem_id="chatbot").style(height="100%")
            with gr.Row():
                with gr.Column(min_width=225, scale=12):
                    user_input = gr.Textbox(
                        elem_id="user_input_tb",
                        show_label=False, placeholder="在这里输入"
                    ).style(container=False)
                with gr.Column(min_width=42, scale=1):
                    submitBtn = gr.Button(value="发送", variant="primary", elem_id="submit_btn")
    gr.Markdown("""llmapi 测试""", elem_id="description")
    gr.HTML(FOOTER.format(versions=__version__), elem_id="footer")
    # Event wiring.
    # BUGFIX: the previous submit handler chained `user` (which cleared the
    # textbox and appended [message, None] to the history) before `predict`;
    # by the time `predict` read `user_input` it was already "", so an empty
    # prompt was sent to the model and the history kept a dangling
    # [message, None] row plus a duplicate ("", reply) row. Route
    # Enter-to-submit straight to `predict`, identical to the send button.
    user_input.submit(predict, inputs=[model_name, user_input, chatbot],
                      outputs=[user_input, chatbot], show_progress=False)
    submitBtn.click(predict, inputs=[model_name, user_input, chatbot],
                    outputs=[user_input, chatbot], show_progress=False)
if __name__ == "__main__":
    demo.launch()
