import os

import gradio as gr
import openai
from openai import OpenAI
import base64

# Log the installed Gradio version at startup (debug aid).
print(gr.__version__)

# SECURITY: an OpenAI API key was previously hard-coded on this line. A key
# committed to source control is compromised and must be rotated. Read the
# key from the environment instead (empty string if unset).
api_key = os.environ.get('OPENAI_API_KEY', '')
openai_model = 'gpt-4o'  # chat model used for every completion request
prompt = ""              # initial value shown in the UI text boxes

def encode_iamge(image_path):
    """Return the contents of the file at *image_path* as a base64 string.

    The misspelled name is kept for backward compatibility with existing
    callers; ``encode_image`` below is the correctly-spelled alias.

    :param image_path: path to a binary file (typically a jpg/png image).
    :return: the file contents, base64-encoded and decoded to a utf-8 str.
    """
    with open(image_path, 'rb') as image_file:
        return base64.b64encode(image_file.read()).decode('utf-8')

# Correctly-spelled alias; prefer this name in new code.
encode_image = encode_iamge

def openai_completion(
    prompt,
    message_history,
    temperature=0.9,
    max_tokens=150,
    top_p=1,
    frequency_penalty=0,
    presence_penalty=0.6,
    sysprompt=None,
    imagePath=None,
):
    """Send one user turn (text, optionally with an image) to the chat API.

    :param prompt: the user's message text.
    :param message_history: list of prior message dicts; mutated in place
        (a system and/or user message is appended before the API call).
    :param sysprompt: optional system-role instruction; appended only when
        it is a non-empty string.
    :param imagePath: optional path to a .jpg/.png file, sent inline as a
        base64 data URL alongside the text.
    :return: the assistant message object from the API response
        (``response.choices[0].message``).
    """
    client = OpenAI(api_key=api_key)
    message = message_history

    # BUGFIX: the original condition was `if sysprompt == ''`, which appended
    # the system message only when it was EMPTY — i.e. exactly when there was
    # nothing to send — and dropped any real system prompt. Append it when a
    # non-empty prompt is supplied instead.
    if sysprompt:
        message.append({
            "role": "system",
            "content": sysprompt,
        })

    uploadMessage = {'role': 'user'}

    if (
        imagePath is not None
        and os.path.exists(imagePath)
        and imagePath.lower().endswith(('jpg', 'png'))
    ):
        # BUGFIX: `imagePath.split('.')[1]` returns the wrong token for any
        # path containing an extra dot (e.g. './photo.png' -> '/photo').
        # Use the real file extension for the data-URL media type.
        imageType = os.path.splitext(imagePath)[1].lstrip('.').lower()
        imageData = encode_iamge(imagePath)
        uploadMessage['content'] = [
            {'type': 'text', 'text': prompt},
            {
                'type': 'image_url',
                'image_url': {'url': f'data:image/{imageType};base64,{imageData}'},
            },
        ]
    else:
        # Text-only turn: content is the plain prompt string.
        uploadMessage['content'] = prompt

    message.append(uploadMessage)
    response = client.chat.completions.create(
        model=openai_model,
        messages=message,
        temperature=temperature,
        max_tokens=max_tokens,
        top_p=top_p,
        frequency_penalty=frequency_penalty,
        presence_penalty=presence_penalty,
    )

    return response.choices[0].message

def chatgpt(
    prompt,
    chat_history,
    message_history,
    temperature,
    max_tokens,
    top_p,
    frequency_penalty,
    presence_penalty,
    sysprompt,
    image_path,
):
    """Gradio submit callback: run one chat turn and refresh UI state.

    :param prompt: text from the message box.
    :param chat_history: list of (user, assistant) pairs for the Chatbot
        widget, or None on the first turn.
    :param message_history: raw API message dicts carried in gr.State,
        or None on the first turn.
    :return: (chatbot pairs, chat state, message state, "" to clear the
        textbox, None to clear the file input).
    """
    chat_history = chat_history or []
    message_history = message_history or []

    # Bound the context window: keep only the four most recent messages.
    if len(message_history) > 4:
        del message_history[:-4]

    # Ask the model for the next assistant turn.
    reply = openai_completion(
        prompt,
        message_history,
        temperature,
        max_tokens,
        top_p,
        frequency_penalty,
        presence_penalty,
        sysprompt,
        image_path,
    )

    chat_history.append((prompt, reply.content))
    message_history.append({'role': reply.role, 'content': reply.content})
    return chat_history, chat_history, message_history, "", None


# Build the Gradio UI: left column holds sampling hyper-parameters and the
# image upload, right column holds the chat display and inputs.
with gr.Blocks(title="Chat with GPT-4o") as block:
    gr.Markdown("## Chat with GPT-4o")
    with gr.Row():
        with gr.Column():
            # Sampling parameters forwarded verbatim to the completions API.
            temperature = gr.Slider(label="Temperature", minimum=0, maximum=1, step=0.1, value=0.9)
            max_tokens = gr.Slider(label="Max Tokens", minimum=10, maximum=400, step=10, value=150)
            top_p = gr.Slider(label="Top P", minimum=0, maximum=1, step=0.1, value=1)
            frequency_penalty = gr.Slider(
                label="Frequency Penalty",
                minimum=0,
                maximum=1,
                step=0.1,
                value=0,
            )
            presence_penalty = gr.Slider(
                label="Presence Penalty",
                minimum=0,
                maximum=1,
                step=0.1,
                value=0.6,
            )

            # Label translation: "上传图片" = "Upload image".
            imageFileInput = gr.components.File(label="上传图片")

        with gr.Column():
            chatbot = gr.Chatbot()
            # Label translation: "assign the AI a role, e.g. 'you are a
            # mathematician'; may be left empty" (used as the system prompt).
            systemmessage = gr.Textbox(value=prompt, label="指派AI的角色，例如：你是一个数学家.也可以为空")
            # Label translation: "enter your question".
            message = gr.Textbox(value=prompt, label="输入你的问题")
            # Per-session state: chatbot display pairs and raw API messages.
            chatstate = gr.State()
            messagestate = gr.State()
            submit = gr.Button("Send")
            # Wire the Send button to the chatgpt callback; the last two
            # outputs clear the message box and the file input after a turn.
            submit.click(
                chatgpt,
                inputs=[
                    message,
                    chatstate,
                    messagestate,
                    temperature,
                    max_tokens,
                    top_p,
                    frequency_penalty,
                    presence_penalty,
                    systemmessage,
                    imageFileInput,
                ],
                outputs=[chatbot, chatstate, messagestate, message, imageFileInput],
            )

if __name__ == "__main__":
    # Launch the app: share=True creates a public tunnel URL, and binding to
    # 0.0.0.0 exposes port 8800 on all network interfaces.
    block.launch(debug=True,share=True,server_name="0.0.0.0",server_port=8800)