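"""Gradio app: text-to-image generation with a fine-tuned Pokemon Stable Diffusion model."""
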
import torch
import gradio as gr
from diffusers import StableDiffusionPipeline
from diffusers import (
    DDIMScheduler, EulerDiscreteScheduler, EulerAncestralDiscreteScheduler,
    UniPCMultistepScheduler, KDPM2DiscreteScheduler, KDPM2AncestralDiscreteScheduler,
    PNDMScheduler, DPMSolverMultistepScheduler,
)
import random


def set_pipeline(model_id_repo, scheduler):
    """Build a StableDiffusionPipeline for the selected model and scheduler."""
    # Map UI model names to their Hugging Face repository ids.
    model_ids_dict = {
        "pokemon": "yashAI007/pokemon",
    }
    model_repo = model_ids_dict.get(model_id_repo)
    print("model_repo:", model_repo)


    # Load in full precision on the CPU. For GPU inference, pass
    # torch_dtype=torch.float16 and move the pipeline to "cuda" instead.
    pipe = StableDiffusionPipeline.from_pretrained(
        model_repo,
        # torch_dtype=torch.float16,
        use_safetensors=True,
    ).to("cpu")

    # pipe = StableDiffusionPipeline.from_pretrained(
    #     model_repo,
    #     torch_dtype=torch.float16,
    #     use_safetensors=True,
    # ).to("cuda")

    scheduler_classes = {
        "DDIM": DDIMScheduler,
        "Euler": EulerDiscreteScheduler,
        "Euler a": EulerAncestralDiscreteScheduler,
        "UniPC": UniPCMultistepScheduler,
        "DPM2 Karras": KDPM2DiscreteScheduler,
        "DPM2 a Karras": KDPM2AncestralDiscreteScheduler,
        "PNDM": PNDMScheduler,
        "DPM++ 2M Karras": DPMSolverMultistepScheduler,
        "DPM++ 2M SDE Karras": DPMSolverMultistepScheduler,
    }
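    # Note: both DPM++ entries above map to DPMSolverMultistepScheduler with its
    # default config; Karras sigmas / SDE mode are not explicitly enabled here
    # (that would require extra options, e.g. use_karras_sigmas, when building the scheduler).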

    scheduler_class = scheduler_classes.get(scheduler)
    if scheduler_class is not None:
        print("sampler_name:", scheduler)
        # Swap in the requested scheduler, reusing the pipeline's existing config.
        pipe.scheduler = scheduler_class.from_config(pipe.scheduler.config)
    else:
        # Unknown name: keep the pipeline's default scheduler.
        print(f"Unknown scheduler '{scheduler}', keeping the default.")
    return pipe
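
# Example: set_pipeline("pokemon", "Euler a") returns a CPU pipeline with the
# Euler Ancestral scheduler swapped in.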


def img_args(
    prompt,
    negative_prompt,
    model_id_repo="pokemon",
    scheduler="Euler a",
    height=896,
    width=896,
    num_inference_steps=30,
    guidance_scale=7.5,
    num_images_per_prompt=1,
    seed=0,
):
    """Generate images for a prompt; a seed of 0 selects a random seed."""
    print("model_id_repo:", model_id_repo)
    print("scheduler:", scheduler)
    print("prompt:", prompt)

    pipe = set_pipeline(model_id_repo, scheduler)

    if seed == 0:
        # A seed of 0 means "random": draw a fresh seed so each run differs.
        seed = random.randint(1, 2**32 - 1)
        print(f"random seed: {seed}")
    else:
        print(f"manual seed: {seed}")
    generator = torch.manual_seed(seed)

    images = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        height=height,
        width=width,
        num_inference_steps=num_inference_steps,
        guidance_scale=guidance_scale,
        num_images_per_prompt=num_images_per_prompt,  # default 1
        generator=generator,
    ).images
    return images
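
# Example of calling img_args directly (note: CPU inference is slow):
#   images = img_args("a cute water-type pokemon", "blurry, low quality", height=512, width=512)
#   images[0].save("pokemon.png")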


# Build the Gradio UI: prompt inputs and advanced options on the left, the output
# gallery on the right. queue() enables request queuing so long generations are handled in order.
block = gr.Blocks().queue()
block.title = "Pokemon Image Generation"
with block as image_gen:
    with gr.Column():
        with gr.Row():
            gr.Markdown("## Pokemon Image Generation")
        with gr.Row():
            with gr.Column():
                prompt = gr.Textbox(placeholder="what you want to generate", label="Positive Prompt")
                negative_prompt = gr.Textbox(placeholder="what you don't want to generate", label="Negative Prompt")
                run_btn = gr.Button("Generate Image", elem_id="select_btn", variant="primary")
                with gr.Accordion(label="Advanced Options", open=False):
                    model_selection = gr.Dropdown(choices=["pokemon"], value="pokemon", label="Model")
                    scheduler_selection = gr.Dropdown(choices=["DDIM", "Euler", "Euler a", "UniPC", "DPM2 Karras", "DPM2 a Karras", "PNDM", "DPM++ 2M Karras", "DPM++ 2M SDE Karras"], value="Euler a", label="Scheduler")
                    guidance_scale_slider = gr.Slider(label="guidance_scale", minimum=0, maximum=15, value=7.5, step=0.5)
                    num_images_per_prompt_slider = gr.Slider(label="num_images_per_prompt", minimum=1, maximum=5, value=1, step=1)
                    height_slider = gr.Slider(label="height", minimum=256, maximum=1024, value=512, step=8)
                    width_slider = gr.Slider(label="width", minimum=256, maximum=1024, value=512, step=8)
                    num_inference_steps_slider = gr.Slider(label="num_inference_steps", minimum=1, maximum=150, value=30, step=1)
                    seed_slider = gr.Slider(label="Seed (0 = random)", minimum=0, maximum=256479815, value=0, step=1)
            with gr.Column():
                out_img = gr.Gallery(label='Output', show_label=False, elem_id="gallery", preview=True)
            
                    
    run_btn.click(fn=img_args, inputs=[prompt, negative_prompt, model_selection, scheduler_selection, height_slider, width_slider, num_inference_steps_slider, guidance_scale_slider, num_images_per_prompt_slider, seed_slider], outputs=[out_img])
image_gen.launch()