utils.py
import os
import errno
import math
import shutil
import torch
import torch.nn as nn
import torch.nn.functional as F


def mkdir_p(path):
    """Make a directory (including parents) if it does not exist, like `mkdir -p`."""
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise
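
# Usage sketch (the path below is hypothetical):
#   mkdir_p('checkpoints/run1')  # also creates parent dirs; silent if it already exists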


class AverageMeter(object):
    """Computes and stores the average and current value."""

    def __init__(self):
        self.reset()

    def reset(self):
        self.value = 0
        self.ave = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        self.value = val
        self.sum += val * n
        self.count += n
        self.ave = self.sum / self.count
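
# Usage sketch for tracking a running loss (values are illustrative):
#   losses = AverageMeter()
#   losses.update(0.9, n=32)  # batch of 32 with mean loss 0.9
#   losses.update(0.7, n=32)
#   print(losses.value, losses.ave)  # 0.7, 0.8 (latest value, running average)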


def accuracy(output, target, topk=(1,)):
    """Return a per-sample top-1 correctness vector (1. if the top prediction
    matches the target, else 0.)."""
    maxk = max(topk)
    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))
    # Only the top-1 row is used; reshape() avoids view() errors on the
    # non-contiguous slice in recent PyTorch versions.
    correct_k = correct[:1].reshape(-1).float()
    return correct_k
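
# Usage sketch: `output` is (batch, num_classes) logits, `target` is (batch,) labels:
#   correct = accuracy(output, target)       # float tensor of 0./1. per sample
#   top1 = correct.mean().item() * 100       # batch top-1 accuracy in percent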


def get_prime(images, patch_size, interpolation='bicubic'):
    """Get the down-sampled version of the original images."""
    prime = F.interpolate(images, size=[patch_size, patch_size], mode=interpolation, align_corners=True)
    return prime
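
# Usage sketch: shrink a batch of 224x224 images to a 96x96 "prime" input
# (sizes are illustrative):
#   prime = get_prime(images, patch_size=96)  # -> (batch, C, 96, 96)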


def get_patch(images, action_sequence, patch_size):
    """Crop a small patch from each original image.

    action_sequence holds normalized (row, col) coordinates in [0, 1],
    one pair per image.
    """
    batch_size = images.size(0)
    image_size = images.size(2)
    # Map normalized coordinates to integer top-left corners.
    patch_coordinate = torch.floor(action_sequence * (image_size - patch_size)).int()
    patches = []
    for i in range(batch_size):
        y0 = patch_coordinate[i, 0].item()
        x0 = patch_coordinate[i, 1].item()
        per_patch = images[i, :, y0:y0 + patch_size, x0:x0 + patch_size]
        patches.append(per_patch.unsqueeze(0))
    return torch.cat(patches, 0)
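
# Usage sketch: crop one patch per image; action_sequence is a (batch, 2) tensor
# of normalized (row, col) coordinates in [0, 1] (sizes are illustrative):
#   actions = torch.rand(images.size(0), 2)
#   patches = get_patch(images, actions, patch_size=96)  # -> (batch, C, 96, 96)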


def adjust_learning_rate(optimizer, train_configuration, epoch, training_epoch_num, args):
    """Set per-group learning rates following a cosine annealing schedule."""
    backbone_lr = 0.5 * train_configuration['backbone_lr'] * \
                  (1 + math.cos(math.pi * epoch / training_epoch_num))
    if args.train_stage == 1:
        fc_lr = 0.5 * train_configuration['fc_stage_1_lr'] * \
                (1 + math.cos(math.pi * epoch / training_epoch_num))
    elif args.train_stage == 3:
        fc_lr = 0.5 * train_configuration['fc_stage_3_lr'] * \
                (1 + math.cos(math.pi * epoch / training_epoch_num))
    else:
        # Previously fc_lr was left undefined here, raising a confusing NameError.
        raise ValueError('adjust_learning_rate expects train_stage 1 or 3, '
                         'got {}'.format(args.train_stage))
    if train_configuration['train_model_prime']:
        # Three param groups: two backbone groups plus the fc head
        # (group order assumed from the training script).
        optimizer.param_groups[0]['lr'] = backbone_lr
        optimizer.param_groups[1]['lr'] = backbone_lr
        optimizer.param_groups[2]['lr'] = fc_lr
    else:
        # Two param groups: backbone plus the fc head.
        optimizer.param_groups[0]['lr'] = backbone_lr
        optimizer.param_groups[1]['lr'] = fc_lr
    for param_group in optimizer.param_groups:
        print(param_group['lr'])
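
# Usage sketch, typically called once per epoch; `args.epochs` as the total
# epoch count is an assumption about the surrounding training script:
#   adjust_learning_rate(optimizer, train_configuration, epoch, args.epochs, args)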


def save_checkpoint(state, is_best, checkpoint='checkpoint', filename='checkpoint.pth.tar'):
    """Save the training state, keeping a copy of the best model so far."""
    filepath = os.path.join(checkpoint, filename)
    torch.save(state, filepath)
    if is_best:
        shutil.copyfile(filepath, os.path.join(checkpoint, 'model_best.pth.tar'))
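

if __name__ == '__main__':
    # Minimal smoke test of the tensor utilities; shapes are illustrative and
    # not tied to any particular training configuration.
    images = torch.rand(4, 3, 224, 224)
    prime = get_prime(images, patch_size=96)
    assert prime.shape == (4, 3, 96, 96)
    actions = torch.rand(4, 2)
    patches = get_patch(images, actions, patch_size=96)
    assert patches.shape == (4, 3, 96, 96)
    meter = AverageMeter()
    meter.update(0.5, n=4)
    print('prime:', tuple(prime.shape), 'patches:', tuple(patches.shape), 'ave:', meter.ave)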