[4fa73e]: /pytorch/utils/misc.py

import time
import logging
from functools import wraps


def timeit(f):
    """Decorator that logs how long the wrapped function took to run."""

    @wraps(f)
    def timed(*args, **kwargs):
        start_time = time.time()
        result = f(*args, **kwargs)
        end_time = time.time()
        seconds = end_time - start_time
        logging.getLogger("Timer").info(
            " [-] %s : %2.5f sec, which is %2.5f min, which is %2.5f hour"
            % (f.__name__, seconds, seconds / 60, seconds / 3600))
        return result

    return timed


def print_cuda_statistics():
    """Log Python/PyTorch/CUDA versions and the GPUs visible to this process."""
    logger = logging.getLogger("Cuda Statistics")
    import sys
    from subprocess import call
    import torch
    logger.info('__Python VERSION: {}'.format(sys.version))
    logger.info('__PyTorch VERSION: {}'.format(torch.__version__))
    logger.info('__CUDA VERSION')
    # call(["nvcc", "--version"])
    # logger.info('__CUDNN VERSION: {}'.format(torch.backends.cudnn.version()))
    # logger.info('__Number CUDA Devices: {}'.format(torch.cuda.device_count()))
    # logger.info('__Devices')
    # Query GPU name, driver version and memory usage via nvidia-smi.
    call(["nvidia-smi", "--format=csv",
          "--query-gpu=index,name,driver_version,memory.total,memory.used,memory.free"])
    logger.info('Active CUDA Device: GPU {}'.format(torch.cuda.current_device()))
    logger.info('Available devices: {}'.format(torch.cuda.device_count()))
    logger.info('Current CUDA device: {}'.format(torch.cuda.current_device()))
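A minimal usage sketch (not part of the file above): it assumes the module is importable as utils.misc; the slow_step function and the basicConfig call are illustrative only.

import logging
from utils.misc import timeit, print_cuda_statistics  # assumed import path

logging.basicConfig(level=logging.INFO)

@timeit
def slow_step(n):
    # Hypothetical workload, used only to exercise the decorator.
    return sum(i * i for i in range(n))

slow_step(1_000_000)       # the "Timer" logger reports elapsed sec/min/hour
print_cuda_statistics()    # needs a CUDA-capable PyTorch build and nvidia-smi on PATH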