-
Notifications
You must be signed in to change notification settings - Fork 2
/
FPA_Keras.py
91 lines (56 loc) · 3.39 KB
/
FPA_Keras.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 27 22:29:24 2019
@author: MSabry
"""
from keras.layers import Conv2D, Conv2DTranspose, MaxPooling2D, BatchNormalization, Multiply, Add, Activation, AveragePooling2D
import keras.backend as K
class Feature_Pyramid_Attention:
    """Feature Pyramid Attention (FPA) module assembled from Keras layers.

    Three branches are built over the input feature map and combined in
    ``FPA()``:

    * a U-shaped pyramid (``downsample``) that pools the input three times,
      applies 7x7 / 5x5 / 3x3 Conv-BN-ReLU blocks, and fuses the scales back
      up with learned 2x transposed convolutions,
    * a plain 1x1 convolution on the input (``direct_branch``), which the
      pyramid output multiplies element-wise as an attention map,
    * a global-average-pooling context branch (``global_pooling_branch``)
      added on top of the attended features.

    NOTE(review): the pyramid's Add() skip connections line up only when the
    spatial dimensions are divisible by 8, and the global branch assumes a
    square input (H == W) — confirm against the callers.
    """

    def __init__(self, layer):
        # Input feature tensor; its static shape fixes the channel count
        # (layer_shape[-1]) used by every convolution below.
        self.layer = layer
        self.layer_shape = K.int_shape(layer)

    def _conv_block(self, x, kernel_size):
        """Conv -> BatchNorm -> ReLU, preserving the input channel count."""
        channels = self.layer_shape[-1]
        x = Conv2D(channels, kernel_size, padding='same',
                   kernel_initializer='he_normal')(x)
        x = BatchNormalization()(x)
        return Activation('relu')(x)

    def _upsample2x(self, x):
        """Learned 2x spatial upsampling via a 2x2 transposed convolution."""
        return Conv2DTranspose(self.layer_shape[-1], 2, strides=(2, 2),
                               kernel_initializer='he_normal')(x)

    def downsample(self):
        """Build the U-shaped pyramid branch and return it at input resolution."""
        # 1/2 scale: 7x7 blocks; the second block feeds the skip connection.
        pooled_half = MaxPooling2D(strides=2)(self.layer)
        trunk7 = self._conv_block(pooled_half, 7)
        skip7 = self._conv_block(trunk7, 7)

        # 1/4 scale: 5x5 blocks; pooling continues from the first 7x7 block.
        pooled_quarter = MaxPooling2D(strides=2)(trunk7)
        trunk5 = self._conv_block(pooled_quarter, 5)
        skip5 = self._conv_block(trunk5, 5)

        # 1/8 scale: two stacked 3x3 blocks at the pyramid bottom.
        pooled_eighth = MaxPooling2D(strides=2)(trunk5)
        bottom = self._conv_block(pooled_eighth, 3)
        bottom = self._conv_block(bottom, 3)

        # Decoder: upsample and merge with the matching-scale skip tensors.
        merged = Add()([self._upsample2x(bottom), skip5])
        merged = Add()([self._upsample2x(merged), skip7])
        return self._upsample2x(merged)

    def direct_branch(self):
        """1x1 convolution on the raw input (no BN or activation)."""
        return Conv2D(self.layer_shape[-1], 1, padding='valid',
                      kernel_initializer='he_normal')(self.layer)

    def global_pooling_branch(self):
        """Global-average-pool context, restored to input resolution."""
        # Pool over the full spatial extent (assumes square H == W).
        spatial = self.layer_shape[1]
        pooled = AveragePooling2D(pool_size=spatial)(self.layer)
        squeezed = Conv2D(self.layer_shape[-1], 1, padding='valid',
                          kernel_initializer='he_normal')(pooled)
        # One transposed conv with kernel == spatial size brings the 1x1
        # context map back to the original H x W.
        return Conv2DTranspose(self.layer_shape[-1], spatial,
                               kernel_initializer='he_normal')(squeezed)

    def FPA(self):
        """Assemble the module: (pyramid * direct) + global context."""
        attended = Multiply()([self.downsample(), self.direct_branch()])
        return Add()([attended, self.global_pooling_branch()])