kadirnar commited on
Commit
a3e2fa9
1 parent: c44f61d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -13
app.py CHANGED
@@ -3,17 +3,16 @@ from huggingface_hub import hf_hub_download
3
  import subprocess
4
  import tempfile
5
  import shutil
6
- import spaces
7
-
8
 
9
  def download_model(repo_id, model_name):
10
  model_path = hf_hub_download(repo_id=repo_id, filename=model_name)
11
  return model_path
12
 
13
- @spaces.GPU
14
- def run_inference(model_name, prompt_path):
15
  repo_id = "hpcai-tech/Open-Sora"
16
 
 
17
  config_mapping = {
18
  "OpenSora-v1-16x256x256.pth": "configs/opensora/inference/16x256x256.py",
19
  "OpenSora-v1-HQ-16x256x256.pth": "configs/opensora/inference/16x512x512.py",
@@ -23,10 +22,17 @@ def run_inference(model_name, prompt_path):
23
  config_path = config_mapping[model_name]
24
  ckpt_path = download_model(repo_id, model_name)
25
 
 
 
 
 
 
 
26
  with open(config_path, 'r') as file:
27
  config_content = file.read()
28
- config_content = config_content.replace('prompt_path = "./assets/texts/t2v_samples.txt"', f'prompt_path = "{prompt_path}"')
29
 
 
30
  with tempfile.NamedTemporaryFile('w', delete=False) as temp_file:
31
  temp_file.write(config_content)
32
  temp_config_path = temp_file.name
@@ -36,12 +42,16 @@ def run_inference(model_name, prompt_path):
36
  "scripts/inference.py", temp_config_path,
37
  "--ckpt-path", ckpt_path
38
  ]
39
- subprocess.run(cmd, capture_output=True, text=True)
40
- shutil.rmtree(temp_file.name)
 
 
 
41
 
42
- # Assuming the output video is saved in a known path, e.g., "./output/video.mp4"
43
- output_video_path = "./output/video.mp4"
44
- return output_video_path
 
45
 
46
  def main():
47
  gr.Interface(
@@ -52,11 +62,12 @@ def main():
52
  "OpenSora-v1-HQ-16x256x256.pth",
53
  "OpenSora-v1-HQ-16x512x512.pth"
54
  ], label="Model Selection"),
55
- gr.Textbox(label="Prompt Path", value="./assets/texts/t2v_samples.txt")
56
  ],
57
- outputs=gr.Video(label="Output Video"),
58
  title="Open-Sora Inference",
59
- description="Run Open-Sora Inference with Custom Parameters"
 
60
  ).launch()
61
 
62
  if __name__ == "__main__":
 
3
  import subprocess
4
  import tempfile
5
  import shutil
6
+ import os
 
7
 
8
def download_model(repo_id, model_name):
    """Download *model_name* from the Hugging Face Hub repository *repo_id*.

    Returns the local filesystem path of the cached checkpoint file.
    """
    return hf_hub_download(repo_id=repo_id, filename=model_name)
11
 
12
+ def run_inference(model_name, prompt_text):
 
13
  repo_id = "hpcai-tech/Open-Sora"
14
 
15
+ # Map model names to their respective configuration files
16
  config_mapping = {
17
  "OpenSora-v1-16x256x256.pth": "configs/opensora/inference/16x256x256.py",
18
  "OpenSora-v1-HQ-16x256x256.pth": "configs/opensora/inference/16x512x512.py",
 
22
  config_path = config_mapping[model_name]
23
  ckpt_path = download_model(repo_id, model_name)
24
 
25
+ # Save prompt_text to a temporary text file
26
+ prompt_file = tempfile.NamedTemporaryFile(delete=False, suffix=".txt", mode='w')
27
+ prompt_file.write(prompt_text)
28
+ prompt_file.close()
29
+
30
+ # Read and update the configuration file
31
  with open(config_path, 'r') as file:
32
  config_content = file.read()
33
+ config_content = config_content.replace('prompt_path = "./assets/texts/t2v_samples.txt"', f'prompt_path = "{prompt_file.name}"')
34
 
35
+ # Create a temporary file for the updated configuration
36
  with tempfile.NamedTemporaryFile('w', delete=False) as temp_file:
37
  temp_file.write(config_content)
38
  temp_config_path = temp_file.name
 
42
  "scripts/inference.py", temp_config_path,
43
  "--ckpt-path", ckpt_path
44
  ]
45
+ result = subprocess.run(cmd, capture_output=True, text=True)
46
+
47
+ # Clean up the temporary files
48
+ os.remove(temp_file.name)
49
+ os.remove(prompt_file.name)
50
 
51
+ if result.returncode == 0:
52
+ return "Inference completed successfully.", result.stdout
53
+ else:
54
+ return "Error occurred:", result.stderr
55
 
56
  def main():
57
  gr.Interface(
 
62
  "OpenSora-v1-HQ-16x256x256.pth",
63
  "OpenSora-v1-HQ-16x512x512.pth"
64
  ], label="Model Selection"),
65
+ gr.Textbox(label="Prompt Text", placeholder="Enter prompt text here")
66
  ],
67
+ outputs="text",
68
  title="Open-Sora Inference",
69
+ description="Run Open-Sora Inference with Custom Parameters",
70
+ share=True # Set to True to create a public link
71
  ).launch()
72
 
73
  if __name__ == "__main__":