import torch

# Check whether a CUDA-capable GPU is visible to PyTorch.
if torch.cuda.is_available():
    print("CUDA GPU is available")
    print("GPU:", torch.cuda.get_device_name(0))
else:
    print("CUDA GPU is not available")