import torch

# Prefer CUDA, then Apple MPS, and fall back to CPU if neither is available.
if torch.cuda.is_available():
    device = torch.device("cuda")
elif torch.backends.mps.is_available():
    device = torch.device("mps")
else:
    device = torch.device("cpu")

print(f"Using device: {device}")