-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathsoft_label_mc.py
69 lines (60 loc) · 2.18 KB
/
soft_label_mc.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
import numpy as np
import tensorflow as tf

# Global TF1 session used throughout this script to eagerly evaluate tensors.
# NOTE(review): TF1-style API (tf.Session, tf.log) — this file predates TF2.
s = tf.Session()
def softlabel_mc(logits, labels, cls_num=5, rate=1.0, dtype=tf.float32):
    """Build per-example targets: hard one-hot where the model is under-confident,
    softmax soft labels where it is confident enough.

    For each example the softmax probability of the true class is compared
    against a per-class threshold. If it falls below the threshold, the hard
    one-hot label is used for that example; otherwise the full softmax
    distribution is returned as a soft label.

    Args:
        logits: unnormalized scores, shape [..., cls_num].
        labels: integer class ids, shape [...].
        cls_num: number of classes.
        rate: threshold scale. For rate <= 1.0 the threshold is
            (1 + ln(cls_num)) / cls_num; otherwise it is rate / cls_num.
        dtype: dtype of the returned tensor.

    Returns:
        Tensor of shape [..., cls_num]: one-hot rows where the true-class
        probability is below the threshold, softmax rows elsewhere.
    """
    fcls_num = float(cls_num)
    if rate <= 1.0:
        # Default threshold: uniform probability plus an entropy-style margin.
        limit = (1.0 / fcls_num) + (1.0 / fcls_num) * -np.log(1. / fcls_num)
    else:
        limit = (1.0 / fcls_num) * rate
    logits = tf.convert_to_tensor(logits, dtype=dtype)
    stm = tf.nn.softmax(logits, axis=-1)
    # one_hot can emit the target dtype directly; no extra convert_to_tensor needed.
    ohot = tf.one_hot(labels, cls_num, dtype=dtype)
    # Per-example margin: threshold minus the softmax probability of the true
    # class (only the true-class column survives the one-hot mask).
    sum_minus = tf.reduce_sum(ohot * limit - stm * ohot, axis=-1)
    # Positive margin -> model under-confident on the true class: keep the hard
    # one-hot target. Otherwise the softmax itself becomes the soft label.
    # (Debug print(s.run(...)) calls removed: they tied the function to the
    # global session and forced evaluation during graph construction.)
    return tf.where(sum_minus > 0.0, ohot, stm)
def ce_loss(logits, labels, dtype=tf.float32):
    """Soft-label cross entropy: -sum(labels * log(softmax(logits))) over axis -1.

    Args:
        logits: unnormalized scores, shape [..., num_classes].
        labels: target (possibly soft) distribution, same shape as logits.
        dtype: dtype used for the computation.

    Returns:
        Per-example loss tensor of shape [...].
    """
    logits = tf.convert_to_tensor(logits, dtype=dtype)
    labels = tf.convert_to_tensor(labels, dtype=dtype)
    stm = tf.nn.softmax(logits, axis=-1)
    # Clip away exact zeros so tf.log never produces -inf (and the loss never
    # becomes NaN) when the softmax underflows for very negative logits.
    stm = tf.clip_by_value(stm, 1e-10, 1.0)
    return -tf.reduce_sum(labels * tf.log(stm), -1)
# Demo data: logits for 3 examples over 3 classes.
a = np.array([[1., 2., 3.], [5., 2., 1.], [2., 3., 1.]])
print('softmax')
print(s.run(tf.nn.softmax(a, -1)))
# Solution 1: rewrite the labels (hard vs. soft per example), then take an
# ordinary soft-label cross entropy against them.
new_labels = softlabel_mc(a, [2, 0, 0], cls_num=3, rate=1.5)
print(s.run(ce_loss(a, new_labels)))
# Solution 2: instead of rewriting the labels, keep hard labels and mask out
# the per-example cross-entropy loss for confident (easy) examples.
def softlabel_ce(labels, logits, cls_num, rate=1.0, dtype=tf.float32):
    """Cross-entropy loss kept only for "hard" (under-confident) examples.

    Computes the standard one-hot cross entropy, then zeroes the loss for
    every example whose true-class softmax probability already exceeds a
    per-class threshold — the masked counterpart of softlabel_mc.

    Args:
        labels: integer class ids, shape [...].
        logits: unnormalized scores, shape [..., cls_num].
        cls_num: number of classes.
        rate: threshold scale. For rate <= 1.0 the threshold is
            (1 + ln(cls_num)) / cls_num; otherwise it is rate / cls_num.
        dtype: dtype used for the computation.

    Returns:
        Per-example loss tensor of shape [...]; 0.0 for confident examples.
    """
    fcls_num = float(cls_num)
    if rate <= 1.0:
        # Default threshold: uniform probability plus an entropy-style margin.
        limit = (1.0 / fcls_num) + (1.0 / fcls_num) * -np.log(1. / fcls_num)
    else:
        limit = (1.0 / fcls_num) * rate
    logits = tf.convert_to_tensor(logits, dtype=dtype)
    stm = tf.nn.softmax(logits, axis=-1)
    ohot = tf.one_hot(labels, cls_num, dtype=dtype)
    # Per-example margin: threshold minus the true-class softmax probability.
    sum_minus = tf.reduce_sum(ohot * limit - stm * ohot, axis=-1)
    # 1.0 where the model is still under-confident on the true class, else 0.0.
    loss_mask = tf.where(sum_minus > 0.0,
                         tf.ones_like(sum_minus),
                         tf.zeros_like(sum_minus))
    # (Debug print(s.run(...)) calls removed: they tied the function to the
    # global session and forced evaluation during graph construction.)
    return -tf.reduce_sum(ohot * tf.log(stm), -1) * loss_mask
print(s.run(softlabel_ce([2, 0, 0], a, 3, rate=1.5)))