Spaces: Running on A10G
BertChristiaens committed
Commit 13e5061 • 1 Parent(s): 71af695

add caching
app.py
CHANGED
@@ -191,6 +191,11 @@ def make_editing_canvas(canvas_color, brush, _reset_state, generation_mode, pain
 
 
     elif generation_mode == "Re-generate objects":
+        st.write("This mode allows you to choose which objects you want to re-generate in the image. "
+                 "Use the selection dropdown to add or remove objects. If you are ready, press the generate button"
+                 " to generate the image, which can take up to 30 seconds. If you want to improve the generated image, click"
+                 " the 'move image to input' button."
+                 )
         canvas = st_canvas(
             **canvas_dict,
         )
@@ -207,7 +212,7 @@ def make_editing_canvas(canvas_color, brush, _reset_state, generation_mode, pain
     st.session_state['unique_colors'] = unique_colors
 
     chosen_colors = st.multiselect(
-        label="Choose
+        label="Choose which concepts you want to regenerate in the image",
         options=st.session_state['unique_colors'],
         key='chosen_colors',
         default=st.session_state['unique_colors'],
models.py
CHANGED
@@ -76,6 +76,7 @@ def postprocess_image_masking(inpainted: Image, image: Image, mask: Image) -> Im
 
 
 @st.experimental_singleton(max_entries=1)
+@st.cache_resource
 def get_controlnet() -> ControlNetModel:
     """Method to load the controlnet model
     Returns:
@@ -100,6 +101,7 @@ def get_controlnet() -> ControlNetModel:
 
 
 @st.experimental_singleton(max_entries=1)
+@st.cache_resource
 def get_segmentation_pipeline() -> Tuple[AutoImageProcessor, UperNetForSemanticSegmentation]:
     """Method to load the segmentation pipeline
     Returns:
@@ -112,6 +114,7 @@ def get_segmentation_pipeline() -> Tuple[AutoImageProcessor, UperNetForSemanticS
 
 
 @st.experimental_singleton(max_entries=1)
+@st.cache_resource
 def get_inpainting_pipeline() -> StableDiffusionInpaintPipeline:
     """Method to load the inpainting pipeline
     Returns:
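For context, st.cache_resource is Streamlit's stable replacement for st.experimental_singleton: the decorated loader runs once per process and the returned object is shared across reruns and sessions, so the heavy ControlNet, segmentation and inpainting pipelines are loaded only once. A minimal sketch of the pattern follows; the load_pipeline function and its body are illustrative assumptions, not code from this Space.

import time
import streamlit as st

@st.cache_resource  # runs once per process; the returned object is reused on every rerun
def load_pipeline():
    time.sleep(2)  # stand-in for an expensive model load, e.g. a diffusers pipeline
    return {"weights": "loaded"}

pipeline = load_pipeline()  # slow only on the first call; cached afterwards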