import torch
from diffusers import ShapEPipeline
import trimesh
import numpy as np


def generate_3d_model(prompt, output_path="assistant_3d.obj"):
    """
    Generate a 3D model from a text prompt with Shap-E and export it in a
    Blender-compatible format (.obj, .glb, or .stl).
    """
    try:
        # Load the Shap-E text-to-3D pipeline on the CPU; float32 is used
        # because half precision is generally not supported for CPU inference.
        pipe = ShapEPipeline.from_pretrained(
            "openai/shap-e",
            torch_dtype=torch.float32,
            low_cpu_mem_usage=True
        ).to("cpu")

        # Run generation and request mesh output directly; the pipeline's
        # default output type is rendered images rather than a mesh.
        outputs = pipe(
            prompt,
            num_inference_steps=16,
            frame_size=24,
            guidance_scale=7.5,
            output_type="mesh"
        )

        # With output_type="mesh", the decoded mesh (vertices and faces)
        # is returned in `outputs.images`.
        mesh = outputs.images[0]

        # Convert the vertex and face tensors to NumPy arrays for trimesh.
        verts = mesh.verts.detach().cpu().numpy()
        faces = mesh.faces.detach().cpu().numpy()

        mesh_obj = trimesh.Trimesh(vertices=verts, faces=faces)

        # trimesh infers the export format from the file extension.
        if output_path.endswith(('.obj', '.glb', '.stl')):
            mesh_obj.export(output_path)
        else:
            raise ValueError(f"Unsupported output format: {output_path}")

        print(f"Successfully exported 3D model to: {output_path}")
        return output_path

    except Exception as e:
        print(f"Error during generation ({type(e).__name__}): {e}")
        raise


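# Note: newer versions of diffusers also ship small export helpers. The lines
# below are a minimal sketch assuming `export_to_obj` is available in
# `diffusers.utils` and that `mesh` is the decoded mesh output produced above:
#
#     from diffusers.utils import export_to_obj
#     export_to_obj(mesh, "assistant_3d.obj")

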
if __name__ == "__main__":
    prompt = "a simple ring"
    try:
        generate_3d_model(prompt, "assistant_3d.obj")
    except Exception as e:
        print(f"Generation failed: {e}")