# HuggingFace Space — Streamlit text-to-image demo.
# (The "Spaces: Running" lines were web-page status residue from the file capture.)
# --- Imports (stdlib / third-party) ----------------------------------------
# Fix: the original imported ``base64`` twice; the duplicate is dropped.
import base64
import time
from io import BytesIO

import requests
import streamlit as st
from huggingface_hub import InferenceClient
from PIL import Image

# --- One-time session-state bootstrap --------------------------------------
# Runs only on the first page load of a session; Streamlit re-executes the
# script on every interaction, so the guard keeps user selections intact.
if "draw_model" not in st.session_state:
    # Display name -> HuggingFace Inference API endpoint.
    # NOTE(review): the Chinese prefixes in many keys look mojibake'd
    # (UTF-8 decoded as GBK). They are runtime dict keys / UI labels, so
    # they are kept byte-for-byte here — confirm intended text upstream.
    st.session_state.draw_model_list = {
        "鐜板疄-AbsoluteReality_v1.8.1":"https://api-inference.huggingface.co/models/digiplay/AbsoluteReality_v1.8.1",
        "鐜板疄-Absolute-Reality-1.81":"https://api-inference.huggingface.co/models/Lykon/absolute-reality-1.81",
        "鍔ㄦ极-AingDiffusion9.2":"https://api-inference.huggingface.co/models/digiplay/AingDiffusion9.2",
        "鐜板疄鍔ㄦ极-BluePencilRealistic_v01":"https://api-inference.huggingface.co/models/digiplay/bluePencilRealistic_v01",
        "鍔ㄦ极鍐欏疄-Counterfeit-v2.5":"https://api-inference.huggingface.co/models/gsdf/Counterfeit-V2.5",
        "鍔ㄦ极鍐欏疄-Counterfeit-v25-2.5d-tweak":"https://api-inference.huggingface.co/models/digiplay/counterfeitV2525d_tweak",
        "鍔ㄦ极鍙埍-Cuteyukimix":"https://api-inference.huggingface.co/models/stablediffusionapi/cuteyukimix",
        "鍔ㄦ极鍙埍-Cuteyukimixadorable":"https://api-inference.huggingface.co/models/stablediffusionapi/cuteyukimixadorable",
        "鐜板疄鍔ㄦ极-Dreamshaper-7":"https://api-inference.huggingface.co/models/Lykon/dreamshaper-7",
        "鐜板疄鍔ㄦ极-Dreamshaper_LCM_v7":"https://api-inference.huggingface.co/models/SimianLuo/LCM_Dreamshaper_v7",
        "鍔ㄦ极3D-DucHaitenDreamWorld":"https://api-inference.huggingface.co/models/DucHaiten/DucHaitenDreamWorld",
        "鐜板疄-EpiCRealism":"https://api-inference.huggingface.co/models/emilianJR/epiCRealism",
        "鐜板疄鐓х墖-EpiCPhotoGasm":"https://api-inference.huggingface.co/models/Yntec/epiCPhotoGasm",
        "鍔ㄦ极涓板瘜-Ether-Blu-Mix-b5":"https://api-inference.huggingface.co/models/tensor-diffusion/Ether-Blu-Mix-V5",
        "鍔ㄦ极-Flat-2d-Animerge":"https://api-inference.huggingface.co/models/jinaai/flat-2d-animerge",
        "鍔ㄦ极椋庢櫙-Genshin-Landscape-Diffusion":"https://api-inference.huggingface.co/models/Apocalypse-19/Genshin-Landscape-Diffusion",
        "鐜板疄鐓х墖-Juggernaut-XL-v7":"https://api-inference.huggingface.co/models/stablediffusionapi/juggernaut-xl-v7",
        "鐜板疄椋庢櫙-Landscape_PhotoReal_v1":"https://api-inference.huggingface.co/models/digiplay/Landscape_PhotoReal_v1",
        "鑹烘湳姘村ⅷ-MoXin":"https://api-inference.huggingface.co/models/zhyemmmm/MoXin",
        "鐜板疄鍐欏疄-OnlyRealistic":"https://api-inference.huggingface.co/models/stablediffusionapi/onlyrealistic",
        "鐜板疄-Realistic-Vision-v51":"https://api-inference.huggingface.co/models/stablediffusionapi/realistic-vision-v51",
        "鍒濆-StableDiffusion-2-1":"https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-2-1",
        "鍒濆-StableDiffusion-XL-0.9":"https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-xl-base-0.9",
        "鍔ㄦ极-TMND-Mix":"https://api-inference.huggingface.co/models/stablediffusionapi/tmnd-mix",
        "animagine-XL-3.0":"https://api-inference.huggingface.co/models/cagliostrolab/animagine-xl-3.0",
        "鑹烘湳-Zavychromaxl-v3":"https://api-inference.huggingface.co/models/stablediffusionapi/zavychromaxlv3",
        "Dalle-v1.1":"https://api-inference.huggingface.co/models/dataautogpt3/OpenDalleV1.1",
        "Dalle-3-xl":"https://api-inference.huggingface.co/models/openskyml/dalle-3-xl",
        "playground-v2-缇庡寲":"https://api-inference.huggingface.co/models/playgroundai/playground-v2-1024px-aesthetic",
        "Dalle-proteus-v0.2":"https://api-inference.huggingface.co/models/dataautogpt3/ProteusV0.2",
    }
    # Fix: ``draw_model`` holds a *key* into ``draw_model_list`` (that is
    # how huggingface_text_to_image and the sidebar selectbox use it).
    # The original stored the URL value here, which would raise KeyError
    # on the first generation before the selectbox ran.
    st.session_state.draw_model = "Dalle-v1.1"
    st.session_state.image_choice = True
    st.session_state.image_choice_name = "Huggingface"

# Main output area for the chat-style prompt/image exchange.
show_app = st.container()
def change_paramater():
    """Selectbox on_change callback.

    The self-assignment is intentional: writing the value back into
    ``st.session_state`` keeps the current model selection from being
    dropped when Streamlit re-creates the widget on the next rerun.
    """
    st.session_state["draw_model"] = st.session_state["draw_model"]
def image_choice():
    """Toggle callback: switch between the two image back-ends.

    Flips ``image_choice`` (True = Huggingface, False = Vispunk) and keeps
    the displayed label in sync with the *new* state.
    """
    flipped = not st.session_state.image_choice
    st.session_state.image_choice = flipped
    st.session_state.image_choice_name = "Huggingface" if flipped else "Vispunk"
def huggingface_text_to_image(text):
    """Generate a PIL image from *text* via the HuggingFace Inference API.

    ``st.session_state.draw_model`` normally holds a key of
    ``draw_model_list`` (set by the sidebar selectbox).  Defensive fix:
    fall back to treating the value as a raw endpoint URL when it is not
    a known key, so a URL stored there no longer raises KeyError.
    """
    selected = st.session_state.draw_model
    endpoint = st.session_state.draw_model_list.get(selected, selected)
    client = InferenceClient(model=endpoint)
    return client.text_to_image(text)
def query_vispunk(prompt, max_polls=120, request_timeout=30):
    """Generate an image through the Vispunk API.

    Submits *prompt* as a generation task, then polls once per second
    until the image is ready.

    Fixes over the original: ``requests.post`` now carries a timeout
    (the original could hang forever on a stalled connection), and the
    polling loop is bounded by *max_polls* instead of spinning
    indefinitely.  Both new parameters default to values that preserve
    the old call signature ``query_vispunk(prompt)``.

    Returns:
        (True, BytesIO) on success, or (False, error) on failure.
    """
    generate_url = "https://motion-api.vispunk.com/v1/generate/generate_image"
    check_url = "https://motion-api.vispunk.com/v1/generate/check_image_task"
    headers = {"Content-Type": "application/json"}

    def _submit(p):
        # Queue the generation task; returns (ok, task_id-or-None).
        try:
            response = requests.post(
                generate_url, headers=headers, json={"prompt": p},
                timeout=request_timeout,
            )
            return True, response.json()["task_id"]
        except Exception as e:
            st.error(f"Error: {e}")
            return False, None

    def _poll(task_id):
        # Best-effort status check: *any* failure (network error, or the
        # "images" field missing because the task is not done yet) is
        # treated as "not ready" and reported as (False, exception).
        try:
            response = requests.post(
                check_url, headers=headers, json={"task_id": task_id},
                timeout=request_timeout,
            )
            return True, response.json()["images"][0]
        except Exception as e:
            return False, e

    ok, task_id = _submit(prompt)
    if not ok:
        return False, task_id
    last_error = None
    for _ in range(max_polls):
        ready, payload = _poll(task_id)
        if ready:
            # The API returns the image as base64; hand back a file-like
            # object suitable for st.image().
            return True, BytesIO(base64.b64decode(payload))
        last_error = payload
        time.sleep(1)
    # Task never completed within the polling budget.
    return False, last_error
def main(prompt):
    """Echo *prompt*, generate an image with the selected back-end, show it.

    Fix: when the Vispunk path fails, the original ignored the returned
    flag and passed the error object straight to ``show_app.image``,
    which crashes; the failure is now reported and the function returns.
    """
    show_app.write("**You:** " + prompt)
    if st.session_state.image_choice:
        image = huggingface_text_to_image(prompt)
    else:
        ok, image = query_vispunk(prompt)
        if not ok:
            show_app.error(f"Image generation failed: {image}")
            return
    show_app.image(image, caption=prompt, use_column_width=True)
# --- Sidebar controls and chat entry point ---------------------------------
with st.sidebar:
    # Toggle between the two back-ends; the label always names the one
    # currently active, and the on_change callback flips it.
    st.session_state.image_choice = st.toggle(
        st.session_state.image_choice_name,
        value=st.session_state.image_choice,
        on_change=image_choice,
    )
    if st.session_state.image_choice:
        # Model picker (Huggingface back-end only), grouped by the
        # category prefix before the first "-".
        model_names = sorted(
            st.session_state.draw_model_list,
            key=lambda name: name.split("-")[0],
        )
        st.session_state.draw_model = st.selectbox(
            "Draw Models", model_names, on_change=change_paramater
        )

prompt = st.chat_input("Send your prompt")
if prompt:
    main(prompt)