import os
import sys
import yaml
import torch
import random
import numpy as np
import gradio as gr
from pathlib import Path
import tempfile
import shutil

# Add the current directory to the Python path
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

# Add the bundled packages directory to the Python path, if present
packages_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'packages')
if os.path.exists(packages_dir):
    sys.path.append(packages_dir)

try:
    from loop import loop
except ImportError as e:
    print(f"Error importing loop: {e}")
    print("Make sure all dependencies are installed correctly")
    sys.exit(1)
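# `loop` is assumed to be the main optimization entry point of the Garment3DGen
# codebase, taking a flat configuration dict with the keys listed in
# DEFAULT_CONFIG below (mirroring the project's YAML config files).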
# Default configuration passed to the optimization loop
DEFAULT_CONFIG = {
    # General and CLIP model settings
    'output_path': './outputs',
    'gpu': 0,
    'seed': 99,
    'clip_model': 'ViT-B/32',
    'consistency_clip_model': 'ViT-B/32',
    'consistency_vit_stride': 8,
    'consistency_vit_layer': 11,
    # Input and target meshes
    'mesh': './meshes/longsleeve.obj',
    'target_mesh': './meshes_target/jacket_sdf_new.obj',
    'retriangulate': 0,
    'bsdf': 'diffuse',
    # Optimization settings and loss weights
    'lr': 0.0025,
    'epochs': 1800,
    'clip_weight': 2.5,
    'delta_clip_weight': 5,
    'vgg_weight': 0.0,
    'face_weight': 0,
    'regularize_jacobians_weight': 0.15,
    'consistency_loss_weight': 0,
    'consistency_elev_filter': 30,
    'consistency_azim_filter': 20,
    'batch_size': 24,
    'train_res': 512,
    'resize_method': 'cubic',
    # Camera and lighting sampling
    'fov_min': 30.0,
    'fov_max': 90.0,
    'dist_min': 2.5,
    'dist_max': 3.5,
    'light_power': 5.0,
    'elev_alpha': 1.0,
    'elev_beta': 5.0,
    'elev_max': 60.0,
    'azim_min': 0.0,
    'azim_max': 360.0,
    # Augmentations
    'aug_loc': 1,
    'aug_light': 1,
    'aug_bkg': 0,
    'adapt_dist': 1,
    # Logging
    'log_interval': 5,
    'log_interval_im': 150,
    'log_elev': 0,
    'log_fov': 60.0,
    'log_dist': 3.0,
    'log_res': 512,
    'log_light_power': 3.0
}
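# Helper (a minimal sketch, not used by the Gradio flow below): merge overrides
# from a YAML config file, e.g. one of the project's config files, on top of
# the defaults above. This is an added convenience, not part of the original
# pipeline.
def load_config(path, defaults=DEFAULT_CONFIG):
    """Return a copy of `defaults` updated with values from a YAML file."""
    config = defaults.copy()
    with open(path, 'r') as f:
        overrides = yaml.safe_load(f) or {}
    config.update(overrides)
    return config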
def process_garment(text_prompt, base_text_prompt, epochs, learning_rate,
                    clip_weight, delta_clip_weight, progress=gr.Progress()):
    """
    Run garment generation for the given prompts and return the output files
    together with a status message.
    """
    try:
        # Copy results out of the temporary working directory before it is
        # cleaned up, so the returned paths remain valid for Gradio.
        results_dir = tempfile.mkdtemp(prefix="garment3dgen_")

        with tempfile.TemporaryDirectory() as temp_dir:
            # Update configuration
            config = DEFAULT_CONFIG.copy()
            config.update({
                'output_path': temp_dir,
                'text_prompt': text_prompt,
                'base_text_prompt': base_text_prompt,
                'epochs': int(epochs),
                'lr': float(learning_rate),
                'clip_weight': float(clip_weight),
                'delta_clip_weight': float(delta_clip_weight),
                'gpu': 0  # Use the first GPU
            })

            # Set random seeds for reproducibility
            random.seed(config['seed'])
            os.environ['PYTHONHASHSEED'] = str(config['seed'])
            np.random.seed(config['seed'])
            torch.manual_seed(config['seed'])
            torch.cuda.manual_seed(config['seed'])
            torch.backends.cudnn.deterministic = True

            progress(0.1, desc="Initializing...")

            # Run the main processing loop
            loop(config)

            progress(0.9, desc="Processing complete, preparing output...")

            # Collect output files and copy them to the persistent directory
            output_files = []
            for file_path in Path(temp_dir).rglob("*"):
                if file_path.is_file() and file_path.suffix.lower() in ['.obj', '.png', '.jpg', '.jpeg', '.gif', '.mp4']:
                    output_files.append(shutil.copy(str(file_path), results_dir))

        if output_files:
            files = output_files[0] if len(output_files) == 1 else output_files
            return files, f"Processing completed; {len(output_files)} output file(s) generated."
        return None, "Processing completed but no output files found."
    except Exception as e:
        return None, f"Error during processing: {e}"
def create_interface():
    """
    Create the Gradio interface.
    """
    with gr.Blocks(title="Garment3DGen - 3D Garment Stylization", theme=gr.themes.Soft()) as interface:
        gr.Markdown("""
        # Garment3DGen: 3D Garment Stylization and Texture Generation

        This tool stylizes a 3D garment mesh using text prompts: describe the desired style and a new 3D garment is generated from the input mesh.

        ## How to use:
        1. Enter a text prompt describing the target style (e.g., "leather jacket with studs")
        2. Enter a base text prompt describing the input mesh (e.g., "simple t-shirt")
        3. Adjust the parameters as needed
        4. Click "Generate" to start the process

        **Note:** Processing may take several minutes depending on the number of epochs.
        """)
        with gr.Row():
            with gr.Column(scale=1):
                gr.Markdown("### Input Parameters")

                text_prompt = gr.Textbox(
                    label="Target Text Prompt",
                    placeholder="e.g., leather jacket with studs, denim jacket with patches",
                    value="leather jacket with studs"
                )
                base_text_prompt = gr.Textbox(
                    label="Base Text Prompt",
                    placeholder="e.g., simple t-shirt, basic long sleeve shirt",
                    value="simple t-shirt"
                )
                epochs = gr.Slider(
                    minimum=100,
                    maximum=3000,
                    value=1800,
                    step=100,
                    label="Number of Epochs",
                    info="More epochs = better quality but longer processing time"
                )
                learning_rate = gr.Slider(
                    minimum=0.0001,
                    maximum=0.01,
                    value=0.0025,
                    step=0.0001,
                    label="Learning Rate"
                )
                clip_weight = gr.Slider(
                    minimum=0.1,
                    maximum=10.0,
                    value=2.5,
                    step=0.1,
                    label="CLIP Weight"
                )
                delta_clip_weight = gr.Slider(
                    minimum=0.1,
                    maximum=20.0,
                    value=5.0,
                    step=0.1,
                    label="Delta CLIP Weight"
                )

                generate_btn = gr.Button("Generate 3D Garment", variant="primary")
            with gr.Column(scale=1):
                gr.Markdown("### Output")

                output = gr.File(label="Generated 3D Garment")
                status = gr.Textbox(label="Status", interactive=False)

        # Connect the button to the processing function
        generate_btn.click(
            fn=process_garment,
            inputs=[text_prompt, base_text_prompt, epochs, learning_rate, clip_weight, delta_clip_weight],
            outputs=[output, status]
        )
gr.Markdown(""" | |
## Tips for better results: | |
- Be specific in your text prompts | |
- Use descriptive terms for materials, colors, and styles | |
- The base text prompt should accurately describe your input mesh | |
- Higher epoch counts generally produce better results but take longer | |
- Experiment with different CLIP weights for different effects | |
## Technical Details: | |
This tool uses Neural Jacobian Fields and CLIP embeddings to deform and stylize 3D garment meshes. | |
The process involves optimizing the mesh geometry and texture to match the target text description. | |
""") | |
return interface | |
if __name__ == "__main__":
    # Create and launch the interface
    interface = create_interface()
    interface.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        debug=True
    )
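    # When running locally rather than on Spaces, passing share=True to
    # interface.launch() creates a temporary public URL for the interface.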