! nf_layer_constructors_submodule.f90
! Forked from modern-fortran/neural-fortran.

submodule(nf_layer_constructors) nf_layer_constructors_submodule

  !! Concrete implementations of the layer constructor functions whose
  !! interfaces are declared in the nf_layer_constructors module.

  use nf_layer, only: layer
  use nf_conv2d_layer, only: conv2d_layer
  use nf_dense_layer, only: dense_layer
  use nf_flatten_layer, only: flatten_layer
  use nf_input1d_layer, only: input1d_layer
  use nf_input3d_layer, only: input3d_layer
  use nf_maxpool2d_layer, only: maxpool2d_layer
  use nf_reshape_layer, only: reshape3d_layer
  use nf_activation, only: activation_function, relu, sigmoid

  implicit none

contains
  module function conv2d(filters, kernel_size, activation) result(res)
    !! Construct a 2-d convolutional layer with the given number of filters
    !! and kernel size. The activation defaults to ReLU if not provided.
    integer, intent(in) :: filters
    integer, intent(in) :: kernel_size
    class(activation_function), intent(in), optional :: activation
    type(layer) :: res

    class(activation_function), allocatable :: activation_tmp

    res % name = 'conv2d'

    ! Use the provided activation, or fall back to the default (ReLU)
    if (present(activation)) then
      allocate(activation_tmp, source=activation)
    else
      allocate(activation_tmp, source=relu())
    end if

    res % activation = activation_tmp % get_name()

    allocate( &
      res % p, &
      source=conv2d_layer(filters, kernel_size, activation_tmp) &
    )

  end function conv2d
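
  ! Usage sketch (illustrative, not from the original file), using the
  ! activation constructors imported above:
  !
  !   layers(1) = conv2d(filters=32, kernel_size=3)        ! ReLU by default
  !   layers(2) = conv2d(16, 3, activation=sigmoid())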
  module function dense(layer_size, activation) result(res)
    !! Construct a fully-connected (dense) layer of the given size.
    !! The activation defaults to sigmoid if not provided.
    integer, intent(in) :: layer_size
    class(activation_function), intent(in), optional :: activation
    type(layer) :: res

    class(activation_function), allocatable :: activation_tmp

    res % name = 'dense'
    res % layer_shape = [layer_size]

    ! Use the provided activation, or fall back to the default (sigmoid)
    if (present(activation)) then
      allocate(activation_tmp, source=activation)
    else
      allocate(activation_tmp, source=sigmoid())
    end if

    res % activation = activation_tmp % get_name()

    allocate(res % p, source=dense_layer(layer_size, activation_tmp))

  end function dense
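
  ! Usage sketch (illustrative, not from the original file):
  !
  !   layers(1) = dense(64)                      ! sigmoid by default
  !   layers(2) = dense(10, activation=relu())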
  module function flatten() result(res)
    !! Construct a flatten layer. It takes no arguments; its output shape
    !! is determined from the preceding layer when the network is built.
    type(layer) :: res
    res % name = 'flatten'
    allocate(res % p, source=flatten_layer())
  end function flatten
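
  ! Usage sketch (illustrative): flatten is typically placed between a
  ! 3-d layer such as conv2d or maxpool2d and a dense layer:
  !
  !   layers(1) = flatten()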
  module function input1d(layer_size) result(res)
    !! Construct a 1-d input layer. Input layers have no inputs of their
    !! own (input_layer_shape is empty) and are initialized on construction.
    integer, intent(in) :: layer_size
    type(layer) :: res
    res % name = 'input'
    res % layer_shape = [layer_size]
    res % input_layer_shape = [integer ::]
    allocate(res % p, source=input1d_layer(layer_size))
    res % initialized = .true.
  end function input1d

  module function input3d(layer_shape) result(res)
    !! Construct a 3-d input layer from a shape of rank 3.
    integer, intent(in) :: layer_shape(3)
    type(layer) :: res
    res % name = 'input'
    res % layer_shape = layer_shape
    res % input_layer_shape = [integer ::]
    allocate(res % p, source=input3d_layer(layer_shape))
    res % initialized = .true.
  end function input3d
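
  ! Usage sketch (illustrative, not from the original file):
  !
  !   layers(1) = input1d(784)            ! e.g. a flattened 28x28 image
  !   layers(2) = input3d([3, 32, 32])    ! e.g. a 3-channel 32x32 image
  !
  ! In upstream neural-fortran these are also reachable through a generic
  ! `input` interface declared in nf_layer_constructors.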
  module function maxpool2d(pool_size, stride) result(res)
    !! Construct a 2-d max-pooling layer. The stride defaults to the pool
    !! size, which gives non-overlapping pooling windows.
    integer, intent(in) :: pool_size
    integer, intent(in), optional :: stride
    integer :: stride_
    type(layer) :: res

    if (pool_size < 2) &
      error stop 'pool_size must be >= 2 in a maxpool2d layer'

    ! Stride defaults to pool_size if not provided
    if (present(stride)) then
      stride_ = stride
    else
      stride_ = pool_size
    end if

    if (stride_ < 1) &
      error stop 'stride must be >= 1 in a maxpool2d layer'

    res % name = 'maxpool2d'

    allocate( &
      res % p, &
      source=maxpool2d_layer(pool_size, stride_) &
    )

  end function maxpool2d
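
  ! Usage sketch (illustrative): these two calls are equivalent because
  ! the stride defaults to the pool size:
  !
  !   layers(1) = maxpool2d(pool_size=2)
  !   layers(2) = maxpool2d(pool_size=2, stride=2)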
  module function reshape(output_shape) result(res)
    !! Construct a reshape layer. Only rank-3 output shapes are currently
    !! supported.
    integer, intent(in) :: output_shape(:)
    type(layer) :: res

    res % name = 'reshape'
    res % layer_shape = output_shape

    if (size(output_shape) == 3) then
      allocate(res % p, source=reshape3d_layer(output_shape))
    else
      error stop 'size(output_shape) of the reshape layer must be 3'
    end if

  end function reshape
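
  ! Usage sketch (illustrative): reshape a flat 784-element vector into a
  ! rank-3 array of 1 channel x 28 rows x 28 columns:
  !
  !   layers(1) = reshape([1, 28, 28])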
end submodule nf_layer_constructors_submodule
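
! End-to-end usage sketch (illustrative, not part of the original file).
! It assumes a top-level `nf` module that re-exports these constructors
! and the `network` type, as in upstream neural-fortran:
!
!   program simple_cnn
!     use nf, only: network, input, reshape, conv2d, maxpool2d, flatten, dense
!     implicit none
!     type(network) :: net
!     net = network([ &
!       input(784), &                          ! flattened 28x28 input
!       reshape([1, 28, 28]), &                ! to channels x height x width
!       conv2d(filters=8, kernel_size=3), &    ! ReLU by default
!       maxpool2d(pool_size=2), &
!       flatten(), &
!       dense(10) &                            ! sigmoid by default
!     ])
!   end program simple_cnn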