
Checking PyTorch GPU (CUDA) information

Kimhj 2024. 1. 17. 14:49
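The snippet below checks whether CUDA is available in PyTorch, counts the visible GPUs, and prints each device's name and total memory before selecting a device to use.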
import torch

if torch.cuda.is_available():
    # Number of available GPU devices
    num_gpu = torch.cuda.device_count()
    print(f"Number of GPUs: {num_gpu}")

    # Name and total memory capacity of each GPU
    for i in range(num_gpu):
        gpu_name = torch.cuda.get_device_name(i)
        gpu_memory = torch.cuda.get_device_properties(i).total_memory
        print(f"GPU {i}: {gpu_name}, Memory: {gpu_memory / (1024 ** 3):.2f} GB")
else:
    print("No GPU available.")
    
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(device)
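
As a quick follow-up, the resulting device object is typically passed to .to() when creating tensors or models. A minimal sketch assuming an illustrative tensor shape and a small nn.Linear layer (both hypothetical, not from the original post):

import torch
import torch.nn as nn

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Move a tensor and a small model to the selected device (illustrative example)
x = torch.randn(4, 8).to(device)
model = nn.Linear(8, 2).to(device)

# The forward pass runs on the GPU when available, otherwise on the CPU
y = model(x)
print(y.device)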