# ANLP_WS24_CA2/gpu_check.py
# (16 lines, 546 B, Python)
"""Report whether CUDA is available to PyTorch and, if so, which GPU is active."""
import torch

# Check if CUDA is available
cuda_available = torch.cuda.is_available()
print(f"CUDA available: {cuda_available}")

if cuda_available:
    # Index of the CUDA device PyTorch is currently using
    current_device = torch.cuda.current_device()
    print(f"Current CUDA device: {current_device}")
    # Human-readable name of the current CUDA device (e.g. the GPU model)
    device_name = torch.cuda.get_device_name(current_device)
    print(f"CUDA device name: {device_name}")
else:
    print("CUDA is not available. Please check your CUDA installation and PyTorch configuration.")