fahad11182 committed on
Commit
828ab31
1 Parent(s): e6829f3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +115 -70
app.py CHANGED
@@ -7,7 +7,7 @@ import random
7
# Load the InstructPix2Pix model.
model_id = "timbrooks/instruct-pix2pix"
# BUG FIX: the original loaded the weights as torch.float16 and then pinned
# the pipeline to "cpu"; half-precision inference is not implemented for most
# CPU ops and raises a RuntimeError at generation time. Use fp16 only when a
# CUDA device is available, otherwise fall back to fp32 on CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
pipe = StableDiffusionInstructPix2PixPipeline.from_pretrained(
    model_id,
    torch_dtype=torch.float16 if device == "cuda" else torch.float32,
)
pipe = pipe.to(device)

# Initialize a random seed (re-rolled by change_style(), read by the editors).
seed = random.randint(0, 10000)
@@ -18,97 +18,142 @@ def change_style():
18
  seed = random.randint(0, 10000)
19
  return f"Seed changed to: {seed}"
20
 
21
# Furniture adding function
def add_furniture(image, style, color, room_type):
    """Add furniture of the chosen style and color to a room photo."""
    # Compose the editing instruction from the three dropdown selections.
    instruction = f"Add {style} style furniture with a {color} tone to this {room_type}."

    # One InstructPix2Pix call; the module-level seed keeps results repeatable.
    return pipe(
        prompt=instruction,
        image=image,
        num_inference_steps=50,
        guidance_scale=7.5,
        generator=torch.manual_seed(seed),
    ).images[0]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
29
 
30
# General image editing function
def edit_image(image, instruction):
    """Apply a free-form text instruction to an uploaded image."""
    # Single InstructPix2Pix pass, seeded from the module-level seed so the
    # same instruction produces the same edit until the seed is changed.
    return pipe(
        prompt=instruction,
        image=image,
        num_inference_steps=50,
        guidance_scale=7.5,
        generator=torch.manual_seed(seed),
    ).images[0]
35
-
36
# Gradio interface for furniture adding
def furniture_interface():
    """Assemble the furniture-adding tab and return its Blocks container."""
    with gr.Blocks() as demo_furniture:
        gr.Markdown("## Furniture Adding App")

        # Inputs: the room photo plus three style-selection dropdowns.
        img_in = gr.Image(type="pil", label="Upload an Image")
        style_dd = gr.Dropdown(["Modern", "Classic", "Minimalist", "Vintage"], label="Choose Furniture Style")
        color_dd = gr.Dropdown(["Blue", "Green", "Red", "White", "Black"], label="Choose Furniture Color")
        room_dd = gr.Dropdown(["Living Room", "Bedroom", "Office", "Dining Room"], label="Room Type")

        # Output preview.
        out_img = gr.Image(label="Edited Image")

        # Action buttons and the seed status box.
        add_btn = gr.Button("Add Furniture")
        reseed_btn = gr.Button("Change the Style")
        seed_box = gr.Textbox(label="Seed Info", interactive=False)

        # Wire the callbacks.
        add_btn.click(fn=add_furniture, inputs=[img_in, style_dd, color_dd, room_dd], outputs=out_img)
        reseed_btn.click(fn=change_style, outputs=seed_box)

    return demo_furniture
70
 
71
# Gradio interface for general image editing
def general_editing_interface():
    """Assemble the instruction-based editing tab and return its container."""
    with gr.Blocks() as demo_general:
        gr.Markdown("## General Image Editing App")

        # Inputs: the picture to edit plus the textual instruction.
        img_in = gr.Image(type="pil", label="Upload an Image")
        instr_in = gr.Textbox(label="Enter the Instruction", placeholder="Describe the changes (e.g., 'Add sunglasses to the person')")

        # Output preview.
        out_img = gr.Image(label="Edited Image")

        # Action buttons and the seed status box.
        apply_btn = gr.Button("Apply Edit")
        reseed_btn = gr.Button("Change the Style")
        seed_box = gr.Textbox(label="Seed Info", interactive=False)

        # Wire the callbacks.
        apply_btn.click(fn=edit_image, inputs=[img_in, instr_in], outputs=out_img)
        reseed_btn.click(fn=change_style, outputs=seed_box)

    return demo_general
99
 
 
100
# Build each sub-app once, then mount both inside a tabbed container.
general_editing_app = general_editing_interface()
furniture_app = furniture_interface()

with gr.Blocks() as combined_demo:
    gr.Markdown("## Select the Application")

    with gr.Tab("Furniture Adding App"):
        furniture_app.render()

    with gr.Tab("General Image Editing App"):
        general_editing_app.render()

# Launch the combined Gradio app
combined_demo.launch()
 
7
# Load the InstructPix2Pix model.
model_id = "timbrooks/instruct-pix2pix"
# BUG FIX: an unconditional pipe.to("cuda") crashes on CPU-only hosts (and the
# fp16 weights cannot run on CPU anyway). Detect the device and pick a dtype
# that matches: fp16 on CUDA, fp32 on CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
pipe = StableDiffusionInstructPix2PixPipeline.from_pretrained(
    model_id,
    torch_dtype=torch.float16 if device == "cuda" else torch.float32,
)
pipe = pipe.to(device)

# Initialize a random seed (re-rolled by change_style(), read by the editors).
seed = random.randint(0, 10000)
 
18
def change_style():
    """Re-roll the module-level random seed and report the new value.

    NOTE(review): the ``global`` declaration is required so the editing
    functions (which read the module-level ``seed``) actually see the new
    value; without it the assignment would only bind a function-local name.
    The ``def`` line sits outside this diff hunk — confirm against the file.
    """
    global seed
    seed = random.randint(0, 10000)
    return f"Seed changed to: {seed}"
20
 
21
# Wall color changing function
def change_color(image, color):
    """Repaint the walls of a room image with the requested paint color.

    Args:
        image: PIL image of the room to edit.
        color: Paint color name chosen in the UI dropdown.

    Returns:
        The edited PIL image produced by InstructPix2Pix.
    """
    # Build the wall-painting instruction for the model.
    prompt = f"paint the walls with {color} color "

    # Text CFG (guidance_scale): how strongly the model follows the prompt.
    text_cfg = 7.5

    # Image CFG (image_guidance_scale): a native InstructPix2Pix parameter —
    # higher values preserve more of the input image content.
    image_cfg = 1.5

    # Run the edit; the module-level seed keeps results reproducible between
    # clicks until the user re-rolls it.
    edited_image = pipe(
        prompt=prompt,
        image=image,
        num_inference_steps=70,       # number of diffusion steps
        guidance_scale=text_cfg,      # prompt adherence
        image_guidance_scale=image_cfg,  # input-image preservation
        generator=torch.manual_seed(seed),
    ).images[0]

    return edited_image
44
+
45
 
46
# General image editing function
def edit_image(image, instruction):
    """Apply a free-form text instruction to an image with InstructPix2Pix.

    Args:
        image: PIL image to edit.
        instruction: Natural-language description of the desired change.

    Returns:
        The edited PIL image.
    """
    # Text CFG (guidance_scale): how strongly the model follows the prompt.
    text_cfg = 12.0

    # Image CFG (image_guidance_scale): a native InstructPix2Pix parameter
    # (not a simulation, contrary to the old comment) — higher values keep
    # the output closer to the input image.
    image_cfg = 1.5

    edited_image = pipe(
        prompt=instruction,
        image=image,
        num_inference_steps=70,       # number of diffusion steps
        guidance_scale=text_cfg,      # prompt adherence
        image_guidance_scale=image_cfg,  # input-image preservation
        generator=torch.manual_seed(seed),
    ).images[0]

    return edited_image
65
+
66
+
67
# Gradio interface for wall repainting
def image_interface():
    """Build the wall-painting tab: upload a room photo and pick a color."""
    with gr.Blocks() as demo_color:
        # FIX: the heading said "## Furniture Adding App" — a copy-paste
        # leftover; this tab repaints walls, it does not add furniture.
        gr.Markdown("## Wall Painting App")

        # Image upload
        image_input = gr.Image(type="pil", label="Upload Room Image")

        # List of common painting colors offered in the dropdown.
        common_colors = [
            "Alabaster",        # Off-white
            "Agreeable Gray",   # Warm gray
            "Sea Salt",         # Soft greenish-blue
            "Pure White",       # Bright white
            "Accessible Beige", # Warm beige
            "Mindful Gray",     # Cool gray
            "Peppercorn",       # Dark charcoal gray
            "Hale Navy",        # Dark navy blue
            "Tricorn Black",    # Pure black
            "Pale Oak",         # Soft taupe
            "Silver Strand",    # Soft blue-gray
            "Rainwashed",       # Light aqua
            "Orange Burst",     # Bright orange
            "Sunny Yellow",     # Bright yellow
            "Sage Green",       # Muted green
            "Firebrick Red",    # Deep red
            "Lavender",         # Soft purple
            "Sky Blue",         # Light blue
            "Coral",            # Vibrant coral
        ]

        # Dropdown for wall color
        color_input = gr.Dropdown(common_colors, label="Choose Wall Color")

        # Display output image
        result_image = gr.Image(label="Edited Image")

        # Button to apply the wall color transformation
        submit_button = gr.Button("Paint the walls")

        # Define action on button click
        submit_button.click(fn=change_color, inputs=[image_input, color_input], outputs=result_image)

    return demo_color
113
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
114
 
115
# Gradio interface for general image editing
def general_editing_interface():
    """Assemble the instruction-based editing tab and return its container."""
    with gr.Blocks() as demo_general:
        gr.Markdown("## General Image Editing App")

        # Inputs: the picture to edit plus the textual instruction.
        img_in = gr.Image(type="pil", label="Upload an Image")
        instr_in = gr.Textbox(label="Enter the Instruction", placeholder="Describe the changes (e.g., 'Make it snowy')")

        # Output preview.
        out_img = gr.Image(label="Edited Image")

        # Action buttons and the seed status box.
        apply_btn = gr.Button("Apply Edit")
        reseed_btn = gr.Button("Change the Style")
        seed_box = gr.Textbox(label="Seed Info", interactive=False)

        # Wire the callbacks.
        apply_btn.click(fn=edit_image, inputs=[img_in, instr_in], outputs=out_img)
        reseed_btn.click(fn=change_style, outputs=seed_box)

    return demo_general
143
 
144
+
145
# Build each sub-app once, then mount both inside a tabbed container.
general_editing_app = general_editing_interface()
color_app = image_interface()

with gr.Blocks() as combined_demo:
    gr.Markdown("## Select the Application")

    with gr.Tab("General Image Editing App"):
        general_editing_app.render()

    with gr.Tab("Changing The Paint App"):
        color_app.render()

# Launch the combined Gradio app
combined_demo.launch()