import torch
import PIL.Image

from diffusers import ShapEPipeline
from diffusers.utils import export_to_gif

def generate_3d_model(prompt, output_path="assistant_3d.gif"):
    """
    Generate a 3D model using Shap-E, configured to run on the CPU.
    """
    try:
        # Load the Shap-E pipeline in float32 on the CPU; low_cpu_mem_usage
        # keeps peak memory down while the weights are loaded.
        pipe = ShapEPipeline.from_pretrained(
            "openai/shap-e",
            torch_dtype=torch.float32,
            low_cpu_mem_usage=True,
        ).to("cpu")

        # A small step count and frame size keep the render tractable on CPU;
        # raise num_inference_steps and frame_size for higher-quality output.
        outputs = pipe(
            prompt,
            num_inference_steps=32,
            frame_size=32,
            guidance_scale=10.0,
        )

        # Depending on the diffusers version and output_type, the pipeline may
        # return a nested list of PIL frames per prompt, a flat list of frames,
        # or numpy arrays; normalize all of these to a flat list of PIL images.
        frames = outputs.images[0] if isinstance(outputs.images[0], list) else outputs.images
        images = [
            frame if isinstance(frame, PIL.Image.Image) else PIL.Image.fromarray(frame)
            for frame in frames
        ]

        gif_path = export_to_gif(images, output_path)
        print(f"Successfully created GIF at: {gif_path}")
        return gif_path

    except Exception as e:
        print(f"Error during generation: {e}")
        print(f"Error type: {type(e)}")
        raise

if __name__ == "__main__":
    prompt = "A gentle AI voice assistant constructed from a circle ring and 3 lines that fly alongside the circle"
    try:
        generate_3d_model(prompt)
    except Exception as e:
        print(f"Generation failed: {e}")