arisegu commited on
Commit
a9e4715
·
verified ·
1 Parent(s): 9d81c90

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -7
app.py CHANGED
@@ -1,15 +1,27 @@
1
  import streamlit as st
2
-
3
- # x = st.slider('Select a value')
4
- # st.write(x, 'squared is', x * x)
5
  import torch
6
  from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler
7
  from diffusers.utils import export_to_video
8
 
9
- pipe = DiffusionPipeline.from_pretrained("damo-vilab/text-to-video-ms-1.7b", torch_dtype=torch.float16, variant="fp16")
10
- pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
11
- pipe.enable_model_cpu_offload()
 
 
 
 
 
 
 
 
 
 
 
12
 
13
  prompt = "Pop international experimental music"
14
- video_frames = pipe(prompt, num_inference_steps=25).frames
 
 
 
 
15
  video_path = export_to_video(video_frames)
 
import streamlit as st
import torch
from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler
from diffusers.utils import export_to_video

# Run inference on CPU (no GPU assumed in this Space).
device = torch.device("cpu")

# Load the text-to-video pipeline onto the CPU.
# float32 is required here: most fp16 kernels are not implemented on CPU.
pipe = DiffusionPipeline.from_pretrained(
    "damo-vilab/text-to-video-ms-1.7b",
    torch_dtype=torch.float32,
).to(device)

# Use the DPM-Solver++ multistep scheduler so fewer denoising steps suffice.
# BUGFIX: schedulers are stateless config objects and have no .to() method —
# the previous `.to(device)` call here raised AttributeError at import time.
pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)

# enable_model_cpu_offload() needs accelerate + a CUDA device; skip on CPU.

prompt = "Pop international experimental music"

# Generate the video frames.
# BUGFIX: the pipeline __call__ accepts no `device` keyword (it raised
# TypeError); the device is already fixed by pipe.to(device) above.
video_frames = pipe(prompt, num_inference_steps=25).frames

# Write the frames out as a video file and keep its path.
video_path = export_to_video(video_frames)

# NOTE(review): the Streamlit UI never shows the result — presumably
# `st.video(video_path)` is intended after this point; confirm with the app
# owner before adding it.