joaogante HF staff committed on
Commit
7cb6518
1 Parent(s): e432e51

print hw info

Browse files
Files changed (1) hide show
  1. app.py +2 -0
app.py CHANGED
@@ -7,6 +7,8 @@ from transformers import AutoConfig, AutoTokenizer, AutoModelForCausalLM, AutoMo
7
 
8
 
9
  torch_device = "cuda" if torch.cuda.is_available() else "cpu"
 
 
10
 
11
 
12
  @lru_cache(maxsize=1) # only cache the latest model
 
7
 
8
 
9
  torch_device = "cuda" if torch.cuda.is_available() else "cpu"
10
+ print("Running on device:", torch_device)
11
+ print("CPU threads:", torch.get_num_threads())
12
 
13
 
14
  @lru_cache(maxsize=1) # only cache the latest model