import torch

# Select the GPU if CUDA is available, otherwise fall back to the CPU.
if torch.cuda.is_available():
    device = torch.device("cuda")
    print("Torch version:", torch.__version__)
    print("Is CUDA enabled?", torch.cuda.is_available())
    print("Number of GPUs available:", torch.cuda.device_count())
    print(f"GPU {torch.cuda.get_device_name(0)} is available")
else:
    device = torch.device("cpu")
    print("No GPU available, using CPU instead")