app.py CHANGED
@@ -16,6 +16,7 @@ import requests
 import base64
 import io
 import tempfile
+import traceback
 MAX_SEED = np.iinfo(np.int32).max
 TMP_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tmp')
 os.makedirs(TMP_DIR, exist_ok=True)
@@ -141,198 +142,167 @@ def generate_model_from_images_and_upload(
     model_description: str,
     req: gr.Request
 ) -> str:
-    …
-        if input_type == 'url':
-            # For URLs, Gradio should have downloaded the image and put its local path in file_data_obj.get('path')
-            img_to_open_path = file_data_obj.get('path')
-            if not img_to_open_path:
-                print(f"Error: For 'url' input_type, 'path' was missing in item {i}: {file_data_obj}")
-                continue
-            print(f"Python INFO: 'url' type. Using Gradio-provided path for '{current_image_name}': {img_to_open_path}")
-
-        elif input_type == 'base64':
-            base64_data_uri = file_data_obj.get('data_uri') # Get from 'data_uri' key sent by Node.js
-            if not base64_data_uri or not isinstance(base64_data_uri, str) or not base64_data_uri.startswith('data:image'):
-                print(f"Error: For 'base64' input_type, 'data_uri' was missing or invalid in item {i}: {file_data_obj}")
-                continue # Skip this image
-
-            try:
-                print(f"Python INFO: 'base64' type. Decoding data_uri for '{current_image_name}'...")
-                    …
-                        mime_subtype = header.split('/')[1].split(';')[0]
-                        if mime_subtype:
-                            file_extension = "." + mime_subtype
-                except Exception as e_ext:
-                    print(f"Python WARNING: Could not parse precise extension for '{current_image_name}' from header/name: {e_ext}. Using {file_extension}.")
-
-                # Create a temporary file to write the decoded data
-                with tempfile.NamedTemporaryFile(delete=False, suffix=file_extension, dir=TMP_DIR) as tmp_file:
-                    tmp_file.write(image_data_bytes)
-                    img_to_open_path = tmp_file.name # Path to the newly saved temp file
-
-                print(f"Python INFO: Decoded base64 for '{current_image_name}' and saved to temporary file: {img_to_open_path}")
-
-            except Exception as e_b64:
-                print(f"Error processing base64 image data for item {i} ('{current_image_name}'): {e_b64}")
-                import traceback
-                traceback.print_exc()
-    … [remaining removed lines match the statements re-added below, at one less indentation level]
+    try:
+        user_dir = os.path.join(TMP_DIR, str(req.session_hash))
+        os.makedirs(user_dir, exist_ok=True)
+
+        print(f"Python DEBUG: Raw image_inputs (as received by function): {image_inputs}")
+        print(f"Python DEBUG: Type of image_inputs: {type(image_inputs)}")
+        if isinstance(image_inputs, list):
+            print(f"Python DEBUG: Length of image_inputs list: {len(image_inputs)}")
+            if len(image_inputs) > 0 and isinstance(image_inputs[0], dict):
+                print(f"Python DEBUG: First element of image_inputs (should be a dict): {image_inputs[0]}")
+                print(f"Python DEBUG: Type of first element: {type(image_inputs[0])}")
+        print(f"Python DEBUG: Received input_type from Node.js: '{input_type}'")
+
+        pil_images = []
+        image_basenames_for_prompt = []
+
+        for i, file_data_obj in enumerate(image_inputs):
+            img_to_open_path = None
+            current_image_name = file_data_obj.get('name', f"image_{i}.png")
+            print(f"Python DEBUG: Processing item {i}: {file_data_obj}, current_image_name: {current_image_name}")
+
+            if input_type == 'url':
+                img_to_open_path = file_data_obj.get('path')
+                if not img_to_open_path:
+                    print(f"Error: For 'url' input_type, 'path' was missing in item {i}: {file_data_obj}")
+                    continue
+                print(f"Python INFO: 'url' type. Using Gradio-provided path for '{current_image_name}': {img_to_open_path}")
+            elif input_type == 'base64':
+                base64_data_uri = file_data_obj.get('data_uri')
+                if not base64_data_uri or not isinstance(base64_data_uri, str) or not base64_data_uri.startswith('data:image'):
+                    print(f"Error: For 'base64' input_type, 'data_uri' was missing or invalid in item {i}: {file_data_obj}")
+                    continue
+                try:
+                    print(f"Python INFO: 'base64' type. Decoding data_uri for '{current_image_name}'...")
+                    header, encoded_data = base64_data_uri.split(',', 1)
+                    image_data_bytes = base64.b64decode(encoded_data)
+                    file_extension = ".png"
+                    try:
+                        parsed_extension = os.path.splitext(current_image_name)[1]
+                        if parsed_extension:
+                            file_extension = parsed_extension
+                        elif '/' in header and ';' in header:
+                            mime_subtype = header.split('/')[1].split(';')[0]
+                            if mime_subtype:
+                                file_extension = "." + mime_subtype
+                    except Exception as e_ext:
+                        print(f"Python WARNING: Could not parse precise extension for '{current_image_name}' from header/name: {e_ext}. Using {file_extension}.")
+                    with tempfile.NamedTemporaryFile(delete=False, suffix=file_extension, dir=TMP_DIR) as tmp_file:
+                        tmp_file.write(image_data_bytes)
+                        img_to_open_path = tmp_file.name
+                    print(f"Python INFO: Decoded base64 for '{current_image_name}' and saved to temporary file: {img_to_open_path}")
+                except Exception as e_b64:
+                    print(f"Error processing base64 image data for item {i} ('{current_image_name}'): {e_b64}")
+                    traceback.print_exc()
+                    continue
+            else:
+                print(f"Error: Unrecognized input_type '{input_type}' for item {i}. Skipping.")
+                continue
+
+            if not img_to_open_path:
+                print(f"Error: No valid image path could be derived for item {i} (name: '{current_image_name}', type: '{input_type}'). Skipping.")
+                continue
+
+            try:
+                print(f"Python INFO: Opening image from path: {img_to_open_path} (intended name for prompt: {current_image_name})")
+                img = Image.open(img_to_open_path)
+                image_basenames_for_prompt.append(os.path.splitext(current_image_name)[0] or f"image_{i}")
+                if img.mode == 'RGBA' or img.mode == 'P':
+                    print(f"Converting image '{current_image_name}' from {img.mode} to RGB")
+                    img = img.convert('RGB')
+                processed_img = pipeline.preprocess_image(img)
+                pil_images.append(processed_img)
+                print(f"Image '{current_image_name}' (item {i+1}) processed successfully and added to list.")
+            except Exception as e_img_proc:
+                print(f"Error opening or processing image at '{img_to_open_path}' (item {i}, name: '{current_image_name}'): {e_img_proc}")
+                traceback.print_exc()
+            finally:
+                if input_type == 'base64' and img_to_open_path and os.path.exists(img_to_open_path):
+                    if TMP_DIR in os.path.abspath(img_to_open_path):
+                        try:
+                            os.remove(img_to_open_path)
+                            print(f"Python INFO: Removed temporary base64 file: {img_to_open_path}")
+                        except Exception as e_remove:
+                            print(f"Python WARNING: Could not remove temp file {img_to_open_path}: {e_remove}")
+                    else:
+                        print(f"Python WARNING: Skipped deletion of temp file as it's not in TMP_DIR (or was a Gradio-managed URL path): {img_to_open_path}")
+
+        if not pil_images:
+            print("Python ERROR: No valid images could be processed from the input list.")
+            raise gr.Error("No valid images could be processed.")
+        print(f"Python INFO: Total PIL images ready for pipeline: {len(pil_images)}")
+        print("Running multi-image pipeline...")
+        outputs = pipeline.run_multi_image(
+            pil_images,
+            seed=seed_val,
+            formats=["gaussian", "mesh"],
+            preprocess_image=False,
+            sparse_structure_sampler_params={
+                "steps": ss_sampling_steps_val,
+                "cfg_strength": ss_guidance_strength_val,
+            },
+            slat_sampler_params={
+                "steps": slat_sampling_steps_val,
+                "cfg_strength": slat_guidance_strength_val,
+            },
+            mode=multiimage_algo_val,
+        )
+        print("Multi-image pipeline completed.")
+        gs_result = outputs['gaussian'][0]
+        mesh_result = outputs['mesh'][0]
+        print(f"Extracting GLB with simplify: {mesh_simplify_val}, texture_size: {texture_size_val}")
+        glb_data = postprocessing_utils.to_glb(gs_result, mesh_result, simplify=mesh_simplify_val, texture_size=texture_size_val, verbose=False)
+        temp_glb_filename = 'temp_output_image_model.glb'
+        temp_glb_path = os.path.join(user_dir, temp_glb_filename)
+        print(f"Exporting GLB to temporary path: {temp_glb_path}")
+        glb_data.export(temp_glb_path)
+        torch.cuda.empty_cache()
+        print("CUDA cache cleared.")
+        print(f"Uploading GLB from {temp_glb_path} to {NODE_SERVER_UPLOAD_URL}")
+        persistent_url = None
+        upload_prompt_name = model_description or "_".join(filter(None, image_basenames_for_prompt)) or "imagen_generated_model"
+        upload_prompt_name = "".join(c if c.isalnum() or c in ['_', '-'] else '_' for c in upload_prompt_name)[:50]
+        try:
+            with open(temp_glb_path, "rb") as f:
+                files = {"modelFile": (temp_glb_filename, f, "model/gltf-binary")}
+                payload = {
+                    "clientType": "imagen",
+                    "prompt": upload_prompt_name,
+                    "modelStage": "imagen_mesh"
+                }
+                print(f"Upload payload to Node.js: {payload}")
+                response = requests.post(NODE_SERVER_UPLOAD_URL, files=files, data=payload, timeout=120)
+            response.raise_for_status()
+            result = response.json()
+            persistent_url = result.get("persistentUrl")
+            if not persistent_url:
+                print(f"No persistent URL in Node.js server response: {result}")
+                raise ValueError("Upload successful, but no persistent URL returned from Node.js server")
+            print(f"Successfully uploaded to Node server. Persistent URL: {persistent_url}")
+        except requests.exceptions.RequestException as upload_err:
+            print(f"FAILED to upload GLB to Node server: {upload_err}")
+            if hasattr(upload_err, 'response') and upload_err.response is not None:
+                print(f"Node server response status: {upload_err.response.status_code}")
+                print(f"Node server response text: {upload_err.response.text}")
+            raise gr.Error(f"Failed to upload result to backend server: {upload_err}")
+        except Exception as e:
+            print(f"UNEXPECTED error during upload: {e}")
+            raise gr.Error(f"Unexpected error during upload: {e}")
+        finally:
+            if os.path.exists(temp_glb_path):
+                print(f"Cleaning up temporary GLB: {temp_glb_path}")
+                os.remove(temp_glb_path)
+        if not persistent_url:
+            print("Failed to obtain a persistent URL for the generated model.")
+            raise gr.Error("Failed to obtain a persistent URL for the generated model.")
+        print(f"Returning persistent URL: {persistent_url}")
+        return persistent_url
+
+    except Exception as e_main:
+        print(f"TOP LEVEL PYTHON ERROR IN generate_model_from_images_and_upload: {e_main}")
+        traceback.print_exc()
+        raise gr.Error(f"A critical error occurred in the Python backend: {e_main}")

 # Gradio interface
 with gr.Blocks(delete_cache=(600, 600)) as demo:
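
For reference, here is a minimal sketch of the per-image item that the 'base64' branch above expects. The 'name' and 'data_uri' keys and the 'data:image/...;base64,...' prefix come from the handler's parsing code; the helper below (make_base64_item) is illustrative only, not part of app.py, and the real Node.js client may assemble the payload differently.

import base64
import mimetypes
import os

def make_base64_item(image_path: str) -> dict:
    """Build one image_inputs entry in the shape the base64 branch expects."""
    # Guess a MIME type so the handler can recover a file extension from the
    # data-URI header via header.split('/')[1].split(';')[0].
    mime_type, _ = mimetypes.guess_type(image_path)
    if not mime_type or not mime_type.startswith('image/'):
        mime_type = 'image/png'
    with open(image_path, 'rb') as f:
        encoded = base64.b64encode(f.read()).decode('ascii')
    return {
        "name": os.path.basename(image_path),              # used for logging and the upload prompt name
        "data_uri": f"data:{mime_type};base64,{encoded}",  # must start with 'data:image'
    }

# Example usage for input_type == 'base64':
# image_inputs = [make_base64_item("front.png"), make_base64_item("side.jpg")]
# For input_type == 'url', each item instead carries a Gradio-provided local
# file path under the 'path' key.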