import os
import random
import urllib.request
import uuid
from datetime import datetime

import gradio as gr
from huggingface_hub import InferenceClient

import agent
from models import models

base_url = "https://johann22-chat-diffusion.hf.space/"

# Load each diffusion model listed in models.py as a callable Gradio interface.
loaded_model = []
for i, model in enumerate(models):
    loaded_model.append(gr.load(f'models/{model}'))
print(loaded_model)

now = datetime.now()
date_time_str = now.strftime("%Y-%m-%d %H:%M:%S")

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
#model = gr.load("models/stabilityai/sdxl-turbo")

history = []


def infer(txt):
    # Unused helper: run a raw prompt through the first loaded diffusion model.
    return loaded_model[0](txt)


def format_prompt(message, history):
    # Wrap the conversation history and the new message in Mixtral's [INST] format.
    prompt = ""
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response} "
    prompt += f"[INST] {message} [/INST]"
    return prompt


def run_gpt(in_prompt, history):
    # Ask the LLM to turn the user's request into an image-generation prompt.
    prompt = format_prompt(in_prompt, history)
    seed = random.randint(1, 1111111111111111)
    print(seed)
    generate_kwargs = dict(
        temperature=1.0,
        max_new_tokens=1048,
        top_p=0.99,
        repetition_penalty=1.0,
        do_sample=True,
        seed=seed,
    )
    content = agent.GENERATE_PROMPT + prompt
    #print(content)
    stream = client.text_generation(content, **generate_kwargs, stream=True, details=True, return_full_text=False)
    resp = ""
    for response in stream:
        resp += response.token.text
    return resp


def run(purpose, history, model_drop):
    history = history or []
    try:
        out_prompt = run_gpt(purpose, history)
    except Exception as e:
        out_prompt = f"An Error Occurred generating the prompt \n {e}"
    # Show the generated prompt in the chat before the image is ready.
    yield ("", [(purpose, out_prompt)], None)
    try:
        model = loaded_model[int(model_drop)]
        out_img = model(out_prompt)
        print(out_img)
        image = f'{base_url}file={out_img}'
        uid = uuid.uuid4()
        urllib.request.urlretrieve(image, f'{uid}.png')
        yield ("", [(purpose, out_prompt)], f'{uid}.png')
    except Exception as e:
        print(e)
        #return ("", [(purpose,history)])
        yield ("An Error Occurred generating the image", [(purpose, out_prompt)], None)


################################################
with gr.Blocks() as iface:
    gr.HTML("""
    <center>
        <h1>Chat Diffusion</h1>
        <h3>This chatbot will generate images</h3>
    </center>
""") with gr.Row(): with gr.Column(): chatbot=gr.Chatbot() msg = gr.Textbox() model_drop=gr.Dropdown(label="Diffusion Models", type="index", choices=[m for m in models], value=models[0]) with gr.Row(): submit_b = gr.Button() stop_b = gr.Button("Stop") clear = gr.ClearButton([msg, chatbot]) sumbox=gr.Image(label="Image",type="filepath") sub_b = submit_b.click(run, [msg,chatbot,model_drop],[msg,chatbot,sumbox]) sub_e = msg.submit(run, [msg, chatbot,model_drop], [msg, chatbot,sumbox]) stop_b.click(None,None,None, cancels=[sub_b,sub_e]) iface.launch()