holmbergfan committed on
Commit
455406a
1 Parent(s): e27f81d

Create geeee

Browse files
Files changed (1) hide show
  1. geeee +42 -0
geeee ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import torch
import streamlit as st
from diffusers import CogVideoXPipeline
from diffusers.utils import export_to_video

st.title("CogVideoX Video Generation on CPU")

# Your prompt
prompt = "A panda, dressed in a small, red jacket and a tiny hat, sits on a wooden stool in a serene bamboo forest. The panda's fluffy paws strum a miniature acoustic guitar, producing soft, melodic tunes. Nearby, a few other pandas gather, watching curiously and some clapping in rhythm. Sunlight filters through the tall bamboo, casting a gentle glow on the scene. The panda's face is expressive, showing concentration and joy as it plays. The background includes a small, flowing stream and vibrant green foliage, enhancing the peaceful and magical atmosphere of this unique musical performance."


@st.cache_resource
def _load_pipeline():
    """Load the CogVideoX pipeline once and reuse it across Streamlit reruns.

    Streamlit re-executes this script on every interaction; without caching,
    the multi-GB model would be reloaded on each button click. `st.cache_resource`
    keeps the pipeline object alive for the session.
    """
    pipe = CogVideoXPipeline.from_pretrained(
        "THUDM/CogVideoX-2b",
        torch_dtype=torch.float32,  # Use float32 for CPU (no fast half-precision path)
    )
    # Ensure model uses CPU
    pipe.to("cpu")
    # VAE optimizations: decode in slices/tiles to lower peak memory usage
    pipe.vae.enable_slicing()
    pipe.vae.enable_tiling()
    return pipe


if st.button("Generate Video"):
    with st.spinner("Generating video on CPU..."):
        pipe = _load_pipeline()

        # Generate the video (first frame batch of the single requested video)
        video = pipe(
            prompt=prompt,
            num_videos_per_prompt=1,
            num_inference_steps=50,
            num_frames=49,
            guidance_scale=6,
            generator=torch.manual_seed(42),  # Use manual_seed for CPU; fixed seed => reproducible output
        ).frames[0]

        # Export video to disk, then display it in the Streamlit app
        export_to_video(video, "output.mp4", fps=8)

        # Show the video in Streamlit
        st.video("output.mp4")

        st.success("Video generated successfully on CPU!")