-
Notifications
You must be signed in to change notification settings - Fork 1
/
model.py
executable file
·204 lines (171 loc) · 10.4 KB
/
model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
#!/usr/bin/env python3
# coding: utf-8
from tensorflow.keras.layers import Input, Conv2D, concatenate
from tensorflow.keras.models import Model
from tensorflow.keras import losses
def _conv_branch(x, prefix, filters=(128, 96, 64, 64), kernel_size=3):
    """Apply a stack of same-padded ReLU Conv2D layers to ``x``.

    Layers are named ``<prefix>_1`` .. ``<prefix>_N`` to match the
    original hand-written layer names (required for weight-file
    compatibility).

    Args:
        x: input Keras tensor.
        prefix: layer-name prefix, e.g. ``'prep_conv3x3'``.
        filters: filter count for each successive Conv2D.
        kernel_size: kernel size shared by every layer in the stack.

    Returns:
        The output Keras tensor of the last Conv2D in the stack.
    """
    for i, n_filters in enumerate(filters, start=1):
        x = Conv2D(n_filters, kernel_size=kernel_size, padding="same",
                   activation='relu', name='%s_%d' % (prefix, i))(x)
    return x


def steg_model(input_shape, pretrain=False):
    """Build and compile the image-steganography model.

    The network has three stages:
      * prep   - extracts features from the secret image (3 branches);
      * hide   - merges cover + prepared secret into ``cover_pred``,
                 the stego image (4 branches);
      * reveal - recovers ``secret_pred`` from the stego image
                 (6 branches).

    NOTE: despite the ``NxN`` layer names, every branch uses 3x3
    kernels, exactly as in the original implementation.

    Args:
        input_shape: (height, width, channels) of both input images;
            ``input_shape[2]`` fixes the channel count of both outputs.
        pretrain: accepted for interface compatibility but currently
            unused.  # TODO(review): wire up or remove upstream.

    Returns:
        A compiled ``Model`` mapping [secret, cover] ->
        [secret_pred, cover_pred], trained with per-output MSE
        (hide weighted 1.0, reveal weighted 0.8) under Adam.
    """
    lossFns = {
        "hide_conv_f": losses.mean_squared_error,
        "revl_conv_f": losses.mean_squared_error,
    }
    lossWeights = {
        "hide_conv_f": 1.0,
        "revl_conv_f": 0.8,
    }

    # Inputs
    secret = Input(shape=input_shape, name='secret')
    cover = Input(shape=input_shape, name='cover')

    # --- Prepare network: 3 parallel branches over the secret image ---
    pconv_3x3 = _conv_branch(secret, 'prep_conv3x3')
    pconv_4x4 = _conv_branch(secret, 'prep_conv4x4')
    pconv_5x5 = _conv_branch(secret, 'prep_conv5x5')
    pconcat_1 = concatenate(
        [pconv_3x3, pconv_4x4, pconv_5x5], axis=3, name="prep_concat_1")
    # Fuse: one 64-filter conv per branch over the shared concat.
    pconv_5x5 = Conv2D(64, kernel_size=3, padding="same",
                       activation='relu', name='prep_conv5x5_f')(pconcat_1)
    pconv_4x4 = Conv2D(64, kernel_size=3, padding="same",
                       activation='relu', name='prep_conv4x4_f')(pconcat_1)
    pconv_3x3 = Conv2D(64, kernel_size=3, padding="same",
                       activation='relu', name='prep_conv3x3_f')(pconcat_1)
    pconcat_f1 = concatenate(
        [pconv_5x5, pconv_4x4, pconv_3x3], axis=3, name="prep_concat_2")

    # --- Hiding network: cover + prepared secret, 4 parallel branches ---
    hconcat_h = concatenate([cover, pconcat_f1], axis=3, name="hide_concat_1")
    hconv_3x3 = _conv_branch(hconcat_h, 'hide_conv3x3')
    hconv_4x4 = _conv_branch(hconcat_h, 'hide_conv4x4')
    hconv_5x5 = _conv_branch(hconcat_h, 'hide_conv5x5')
    hconv_6x6 = _conv_branch(hconcat_h, 'hide_conv6x6')
    # BUGFIX: the original concatenated hconv_5x5 twice
    # ([6x6, 5x5, 3x3, 4x4, 5x5]), inflating the channel count to 320.
    # Each branch now appears exactly once, in the same order as
    # hide_concat_3 below.
    hconcat_1 = concatenate(
        [hconv_6x6, hconv_5x5, hconv_4x4, hconv_3x3], axis=3,
        name="hide_concat_2")
    hconv_6x6 = Conv2D(64, kernel_size=3, padding="same",
                       activation='relu', name='hide_conv6x6_f')(hconcat_1)
    hconv_5x5 = Conv2D(64, kernel_size=3, padding="same",
                       activation='relu', name='hide_conv5x5_f')(hconcat_1)
    hconv_4x4 = Conv2D(64, kernel_size=3, padding="same",
                       activation='relu', name='hide_conv4x4_f')(hconcat_1)
    hconv_3x3 = Conv2D(64, kernel_size=3, padding="same",
                       activation='relu', name='hide_conv3x3_f')(hconcat_1)
    hconcat_f1 = concatenate(
        [hconv_6x6, hconv_5x5, hconv_4x4, hconv_3x3], axis=3,
        name="hide_concat_3")
    # Stego image: linear 1x1 conv back to the input channel count.
    cover_pred = Conv2D(input_shape[2], kernel_size=1, padding="same",
                        name='hide_conv_f')(hconcat_f1)

    # --- Reveal network: 6 parallel branches over the stego image ---
    # Reveal branches widen (64 -> 128) instead of narrowing.
    revl_filters = (64, 64, 96, 128)
    rconv_3x3 = _conv_branch(cover_pred, 'revl_conv3x3', revl_filters)
    rconv_4x4 = _conv_branch(cover_pred, 'revl_conv4x4', revl_filters)
    rconv_5x5 = _conv_branch(cover_pred, 'revl_conv5x5', revl_filters)
    rconv_6x6 = _conv_branch(cover_pred, 'revl_conv6x6', revl_filters)
    rconv_7x7 = _conv_branch(cover_pred, 'revl_conv7x7', revl_filters)
    rconv_8x8 = _conv_branch(cover_pred, 'revl_conv8x8', revl_filters)
    rconcat_1 = concatenate(
        [rconv_3x3, rconv_4x4, rconv_5x5, rconv_6x6, rconv_7x7, rconv_8x8],
        axis=3, name="revl_concat_1")
    rconv_8x8 = Conv2D(128, kernel_size=3, padding="same",
                       activation='relu', name='revl_conv8x8_f')(rconcat_1)
    rconv_7x7 = Conv2D(128, kernel_size=3, padding="same",
                       activation='relu', name='revl_conv7x7_f')(rconcat_1)
    rconv_6x6 = Conv2D(128, kernel_size=3, padding="same",
                       activation='relu', name='revl_conv6x6_f')(rconcat_1)
    rconv_5x5 = Conv2D(128, kernel_size=3, padding="same",
                       activation='relu', name='revl_conv5x5_f')(rconcat_1)
    rconv_4x4 = Conv2D(128, kernel_size=3, padding="same",
                       activation='relu', name='revl_conv4x4_f')(rconcat_1)
    rconv_3x3 = Conv2D(128, kernel_size=3, padding="same",
                       activation='relu', name='revl_conv3x3_f')(rconcat_1)
    rconcat_f1 = concatenate(
        [rconv_8x8, rconv_7x7, rconv_6x6, rconv_5x5, rconv_4x4, rconv_3x3],
        axis=3, name="revl_concat_2")
    # Recovered secret: linear 1x1 conv back to the input channel count.
    secret_pred = Conv2D(input_shape[2], kernel_size=1, padding="same",
                         name='revl_conv_f')(rconcat_f1)

    model = Model(inputs=[secret, cover], outputs=[secret_pred, cover_pred])
    # Compile model: per-output MSE, hide weighted above reveal.
    model.compile(optimizer='adam', loss=lossFns, loss_weights=lossWeights)
    return model