print hw info
app.py CHANGED

@@ -7,6 +7,8 @@ from transformers import AutoConfig, AutoTokenizer, AutoModelForCausalLM, AutoMo
 
 
 torch_device = "cuda" if torch.cuda.is_available() else "cpu"
+print("Running on device:", torch_device)
+print("CPU threads:", torch.get_num_threads())
 
 
 @lru_cache(maxsize=1) # only cache the latest model
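The two added print calls log the selected torch device and the CPU thread count at startup, which makes it easy to confirm from the Space logs whether the container actually got a GPU. For context, below is a minimal sketch of how the surrounding app.py plausibly wires this together; the loader name load_model and its body are assumptions, since the diff only shows the device check and the @lru_cache decorator.

from functools import lru_cache

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

torch_device = "cuda" if torch.cuda.is_available() else "cpu"
print("Running on device:", torch_device)       # added in this commit
print("CPU threads:", torch.get_num_threads())  # added in this commit


@lru_cache(maxsize=1)  # only cache the latest model
def load_model(model_id: str):
    # Hypothetical helper: the diff shows only the decorator, not the function it wraps.
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(model_id).to(torch_device)
    model.eval()
    return tokenizer, model

With maxsize=1, loading a different model_id evicts the previously cached entry, so at most one model's weights stay cached at a time, which matches the "only cache the latest model" comment and keeps memory use bounded on a small Space.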