Ruslan Magana Vsevolodovna committed on
Commit 76c4f36
1 Parent(s): 3ad339f

Update app.py

Files changed (1)
  1. app.py +3 -4
app.py CHANGED
@@ -24,9 +24,8 @@ model = AutoModelForSeq2SeqLM.from_pretrained("sshleifer/distilbart-cnn-12-6")
 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 print(device)
 
-def log_gpu_memory():
-    print(subprocess.check_output('nvidia-smi').decode('utf-8'))
-
+#def log_gpu_memory():
+#    print(subprocess.check_output('nvidia-smi').decode('utf-8'))
 #log_gpu_memory()
 
 
@@ -65,7 +64,7 @@ def get_output_video(text):
     models_root=models_root,
     is_reusable=True,
     is_verbose=True,
-    dtype=torch.float16 if fp16 else torch.float32 ,#param ["float32", "float16", "bfloat16"]
+    dtype=torch.float16 if fp16 else torch.float32, #param ["float32", "float16", "bfloat16"]
     #device='cuda' #'cpu'
 )
 #log_gpu_memory()
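
In short, the first hunk comments out the log_gpu_memory() helper, which shells out to nvidia-smi and fails on hosts without an NVIDIA driver, and the second hunk only moves the comma on the dtype argument (torch.float16 when fp16 is set, torch.float32 otherwise). Below is a minimal sketch of a guarded variant of that helper, assuming the goal is simply to skip the nvidia-smi call on CPU-only machines; this guarded form is an illustration, not part of the commit.

import subprocess
import torch

def log_gpu_memory():
    # Guarded variant (assumption, not the committed code): only call
    # nvidia-smi when a CUDA device is visible, so CPU-only hosts don't
    # raise FileNotFoundError.
    if torch.cuda.is_available():
        print(subprocess.check_output('nvidia-smi').decode('utf-8'))
    else:
        print('No CUDA device available; skipping nvidia-smi.')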