sodeep_utils.py
"""
****************** COPYRIGHT AND CONFIDENTIALITY INFORMATION ******************
Copyright (c) 2019 [Thomson Licensing]
All Rights Reserved
This program contains proprietary information which is a trade secret/business \
secret of [Thomson Licensing] and is protected, even if unpublished, under \
applicable Copyright laws (including French droit d'auteur) and/or may be \
subject to one or more patent(s).
Recipient is to retain this program in confidence and is not permitted to use \
or make copies thereof other than as permitted in a written agreement with \
[Thomson Licensing] unless otherwise expressly allowed by applicable laws or \
by [Thomson Licensing] under express agreement.
Thomson Licensing is a company of the group TECHNICOLOR
*******************************************************************************
This script permits one to reproduce the training and experiments of:
Engilberge, M., Chevallier, L., Pérez, P., & Cord, M. (2019, June).
SoDeep: A Sorting Deep Net to Learn Ranking Loss Surrogates.
In Proceedings of CVPR
Author: Martin Engilberge
"""
import torch

def get_rank(batch_score, dim=0):
    # A double argsort gives each entry its ascending rank; negating and
    # shifting by the size along `dim` converts it to a descending rank in
    # {1, ..., N}, which is then normalized to (0, 1].
    rank = torch.argsort(batch_score, dim=dim)
    rank = torch.argsort(rank, dim=dim)
    rank = (rank * -1) + batch_score.size(dim)
    rank = rank.float()
    rank = rank / batch_score.size(dim)
    return rank
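
# Illustrative usage of get_rank (this example is not part of the original
# module): a higher score maps to a smaller normalized rank in (0, 1].
#
#   >>> get_rank(torch.tensor([0.2, 0.9, 0.5]))
#   tensor([1.0000, 0.3333, 0.6667])
#
# Here 0.9 ranks 1st of 3, so its normalized rank is 1/3; the lowest score
# always receives 1.0.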

class AverageMeter(object):
    """Computes and stores the average and current value"""

    def __init__(self):
        self.reset()

    def reset(self):
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        # n is the number of samples the value was averaged over
        # (e.g. the batch size), so the running average is sample-weighted.
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count
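
# Typical use of AverageMeter in a training loop (illustrative sketch, not
# part of the original module); `criterion`, `output`, `target`, and `images`
# are hypothetical names:
#
#   losses = AverageMeter()
#   loss = criterion(output, target)
#   losses.update(loss.item(), images.size(0))  # weight by batch size
#   print(losses.val, losses.avg)               # last batch loss, running mean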

def build_vocab(sentences):
    """Count word occurrences over an iterable of tokenized sentences."""
    vocab = {}
    for sentence in sentences:
        for word in sentence:
            try:
                vocab[word] += 1
            except KeyError:
                vocab[word] = 1
    return vocab
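
# Illustrative call (not part of the original module): sentences are expected
# to be pre-tokenized, i.e. iterables of words.
#
#   >>> build_vocab([["a", "cat"], ["a", "dog"]])
#   {'a': 2, 'cat': 1, 'dog': 1}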

def save_checkpoint(state, is_best, model_name, epoch):
    # Only the best model is persisted; the ./weights/ directory must already
    # exist. Note that epoch is accepted but unused here.
    if is_best:
        torch.save(state, './weights/best_' + model_name + ".pth.tar")
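
# save_checkpoint is typically called with a serializable state dict
# (hypothetical field and model names, illustrative only):
#
#   save_checkpoint({'epoch': epoch,
#                    'state_dict': model.state_dict(),
#                    'optimizer': optimizer.state_dict()},
#                   is_best=True, model_name='sodeep', epoch=epoch)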

def count_parameters(model):
    """Number of trainable (requires_grad) parameters in the model."""
    return sum(p.numel() for p in model.parameters() if p.requires_grad)
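
# Quick sanity check (illustrative, not part of the original module):
#
#   >>> count_parameters(torch.nn.Linear(10, 2))
#   22
#
# A Linear(10, 2) layer has 10 * 2 weights plus 2 biases, all trainable.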

def log_epoch(logger, epoch, train_loss, val_loss, lr, batch_train, batch_val, data_train, data_val):
    """Write one epoch's scalar metrics to a TensorBoard-style logger."""
    logger.add_scalar('Loss/Train', train_loss, epoch)
    logger.add_scalar('Loss/Val', val_loss, epoch)
    logger.add_scalar('Learning/Rate', lr, epoch)
    logger.add_scalar('Learning/Overfitting', val_loss / train_loss, epoch)
    logger.add_scalar('Time/Train/Batch Processing', batch_train, epoch)
    logger.add_scalar('Time/Val/Batch Processing', batch_val, epoch)
    logger.add_scalar('Time/Train/Data loading', data_train, epoch)
    logger.add_scalar('Time/Val/Data loading', data_val, epoch)
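
# log_epoch expects any logger exposing add_scalar(tag, value, step). A minimal
# sketch, assuming torch.utils.tensorboard is available and './logs' is a
# hypothetical output directory:
#
#   from torch.utils.tensorboard import SummaryWriter
#   writer = SummaryWriter(log_dir='./logs')
#   log_epoch(writer, epoch=0, train_loss=0.9, val_loss=1.1, lr=1e-3,
#             batch_train=0.05, batch_val=0.04, data_train=0.01, data_val=0.01)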

def flatten(l):
    """Flatten a list of lists into a single list."""
    return [item for sublist in l for item in sublist]
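
# Example (illustrative, not part of the original module):
#
#   >>> flatten([[1, 2], [3], [4, 5]])
#   [1, 2, 3, 4, 5]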