# 2018-12-05 11:55:27 +01:00  (stray VCS timestamp residue — commented out so the module parses)
|
|
|
import torch
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def check_cuda():
    """Report whether a CUDA-capable device is visible to torch."""
    available = torch.cuda.is_available()
    return available
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Cached once at import time so downstream helpers (init_seeds,
# select_device) don't re-query the CUDA runtime on every call.
CUDA_AVAILABLE = check_cuda()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def init_seeds(seed=0):
    """Seed torch's random number generators for reproducible runs.

    Parameters
    ----------
    seed : int, optional
        Seed applied to the CPU generator and, when CUDA is available,
        to every CUDA device generator as well (default 0).
    """
    torch.manual_seed(seed)

    if CUDA_AVAILABLE:
        # Seed the current CUDA device and all other devices too,
        # so multi-GPU runs are reproducible as well.
        torch.cuda.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)

        # torch.cuda.set_device(0) # OPTIONAL: Set your GPU if multiple available
|
2018-12-05 11:55:27 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def select_device(force_cpu=False):
    """Choose the torch device to run on and print the selection.

    Parameters
    ----------
    force_cpu : bool, optional
        When True, always return the CPU device even if CUDA is available
        (default False).

    Returns
    -------
    torch.device
        ``cuda:0`` when CUDA is available and not forced off, else ``cpu``.
    """
    if force_cpu:
        device = torch.device('cpu')
    else:
        device = torch.device('cuda:0' if CUDA_AVAILABLE else 'cpu')

    print('Using ' + str(device) + '\n')

    return device
|