# CaraNet/utils/utils.py
# (NOTE: web-scrape residue — page chrome and a line-number gutter — removed
# so the module is valid Python.)
import torch
import numpy as np
from thop import profile
from thop import clever_format
def clip_gradient(optimizer, grad_clip):
    """
    Clamp every parameter gradient in ``optimizer`` to [-grad_clip, grad_clip].

    Used to calibrate gradient misalignment via the gradient-clipping technique.

    :param optimizer: optimizer whose ``param_groups`` are traversed
    :param grad_clip: positive bound applied element-wise to each gradient
    :return: None (gradients are clamped in place)
    """
    for param_group in optimizer.param_groups:
        for p in param_group['params']:
            # Parameters outside the current graph have no gradient; skip them.
            if p.grad is None:
                continue
            p.grad.data.clamp_(-grad_clip, grad_clip)
def adjust_lr(optimizer, init_lr, epoch, decay_rate=0.1, decay_epoch=30):
    """
    Apply a step-decay schedule: lr = init_lr * decay_rate ** (epoch // decay_epoch).

    Bug fix: the original did ``param_group['lr'] *= decay``, which compounds
    the decay every time the function is called and silently ignored
    ``init_lr``.  Recomputing the lr from ``init_lr`` makes the schedule
    idempotent — calling it any number of times for the same epoch yields the
    same learning rate.

    :param optimizer: optimizer whose ``param_groups``' ``'lr'`` entries are set
    :param init_lr: base learning rate the schedule decays from
    :param epoch: current epoch index (0-based)
    :param decay_rate: multiplicative decay factor applied per step
    :param decay_epoch: number of epochs between decay steps
    :return: the learning rate that was applied (original returned None,
             so returning it is backward-compatible)
    """
    decay = decay_rate ** (epoch // decay_epoch)
    new_lr = init_lr * decay
    for param_group in optimizer.param_groups:
        param_group['lr'] = new_lr
    return new_lr
class AvgMeter(object):
    """
    Running-average tracker with a sliding window over recent losses.

    ``update`` accumulates a weighted running mean; ``show`` returns the mean
    of the most recent ``num`` recorded values (assumes the recorded values
    are torch tensors, since ``show`` stacks them — TODO confirm with callers).
    """

    def __init__(self, num=40):
        # Window size used by show().
        self.num = num
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0
        self.losses = []

    def update(self, val, n=1):
        """
        Record ``val`` with weight ``n`` and refresh the running average.

        :param val: latest measured value (kept in ``losses`` for show())
        :param n: weight / batch size associated with this measurement
        """
        self.val = val
        self.sum = self.sum + val * n
        self.count = self.count + n
        self.avg = self.sum / self.count
        self.losses.append(val)

    def show(self):
        """Return the mean of the last ``num`` recorded losses as a tensor."""
        window_start = max(len(self.losses) - self.num, 0)
        recent = self.losses[window_start:]
        return torch.mean(torch.stack(recent))
def CalParams(model, input_tensor):
    """
    Print FLOPs and parameter count for ``model`` evaluated on ``input_tensor``.

    Relies on THOP (https://github.com/Lyken17/pytorch-OpCounter); the module
    must provide::

        from thop import profile
        from thop import clever_format

    :param model: the network to profile
    :param input_tensor: a sample input forwarded through the model
    :return: None (results are printed to stdout)
    """
    raw_flops, raw_params = profile(model, inputs=(input_tensor,))
    flops, params = clever_format([raw_flops, raw_params], "%.3f")
    print('[Statistics Information]\nFLOPs: {}\nParams: {}'.format(flops, params))